##// END OF EJS Templates
debugdiscovery: use `get_unique_pull_path`...
marmoute -
r47721:9e021cff default
parent child Browse files
Show More
@@ -1,4817 +1,4819
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullid,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 wireprotov2peer,
94 94 )
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 deltas as deltautil,
106 106 nodemap,
107 107 sidedata,
108 108 )
109 109
110 110 release = lockmod.release
111 111
112 112 table = {}
113 113 table.update(strip.command._table)
114 114 command = registrar.command(table)
115 115
116 116
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling forms: an explicit index file plus two revisions, or two
    # revisions resolved against the current repository's changelog.
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if nargs == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    else:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    ancestornode = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancestornode), hex(ancestornode)))
136 136
137 137
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Drop the (harmless) EICAR test file into the cache area; a resident
    # AV engine is expected to react to it, which would explain otherwise
    # mysterious interference with repository files.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
153 153
154 154
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle file, let exchange sniff its format, then replay it.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
161 161
162 162
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repository: revision numbers in the DAG
    # description are assumed to start at 0.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, for progress totals
    # and for sizing the mergeable file)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Build all commits inside a single transaction under both locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one commit with parents 'ps'.
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain a single file "mf" whose lines are merged with
                    # a three-way text merge on merge commits.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's slot in the shared file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One fresh "nf<id>" file per revision; merges also carry
                    # over the p2 side's nf* files so they are not lost.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node 'id'.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the named branch for subsequent commits.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
338 338
339 339
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup 'gen'.

    With all=True every delta field is shown for the changelog, the manifest
    and each filelog; otherwise only the changelog node hashes are listed.
    'indent' prefixes each output line (used when nested inside bundle2
    output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # One header line, then one line per delta in the current part.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter(callable, {}) stops when filelogheader() returns an empty
        # dict, i.e. at the end of the filelog parts.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
379 379
380 380
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown marker format: report version and size instead of aborting,
        # so the rest of the bundle can still be inspected.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # Sorted for deterministic output across runs.
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
403 403
404 404
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data'"""
    padding = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    # Walk phases in their canonical order, printing one head per line.
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(padding)
            ui.write(b'%s %s\n' % (hex(head), phasename))
413 413
414 414
def _quasirepr(thing):
    """Return a deterministic, repr-like bytestring for 'thing'.

    Mappings are rendered with their keys sorted so the output is stable
    regardless of insertion order; everything else falls back to repr().
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
421 421
422 422
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # Honour --part-type filtering when given.
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # Known part types get their payload decoded and dumped (unless
        # --quiet); unknown parts only show the header line above.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
445 445
446 446
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec, skip the content dump.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
469 469
470 470
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        # Bundle2 capabilities are nested: one key per capability, each with
        # zero or more supported values.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # Always release the peer's resources (e.g. its ssh connection).
        peer.close()
490 490
491 491
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute from the changeset itself instead of trusting storage.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the pre-computed file-change information from sidedata, if
        # this revision has any.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify the change; categories are mutually exclusive and
            # checked from most to least specific.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
541 541
542 542
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Pass 1: each dirstate entry must be consistent with the parent
    # manifests (state codes: n=normal, a=added, r=removed, m=merged).
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # Pass 2: every file in manifest1 must be tracked by the dirstate.
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
571 571
572 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # Always report the active color mode first.
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # Then delegate to the requested listing, forwarding its return value.
    if opts.get('style'):
        viewer = _debugdisplaystyle
    else:
        viewer = _debugdisplaycolor
    return viewer(ui)
585 585
586 586
def _debugdisplaycolor(ui):
    """Print every available color/effect name, rendered in itself."""
    # Work on a copy so the caller's ui style table is left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode also exposes user-defined color.* / terminfo.* keys.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
603 603
604 604
def _debugdisplaystyle(ui):
    """List every configured style label together with its effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad the effect column so entries line up under the longest label.
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, longest - len(label)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
618 618
619 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream bundle and write its chunks to 'fname'.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
641 641
642 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index: emit its nodes, labeling requested revs.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for nodes and ('l', (rev, label))
            # for the revisions the user asked to label.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # Current repository: walk the changelog, optionally emitting branch
        # switches ('a' events) and tag labels ('l' events).
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
712 712
713 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the sole positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: the stored revision with no flag processing applied.
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729 729
730 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # -e: also accept the laxer formats from extendeddateformats.
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        # Optional second argument: test the date against a date range.
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
749 749
750 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather per-revision delta statistics from the revlog index entry.
        # Index fields used: e[1]=compressed size, e[2]=uncompressed size,
        # e[3]=delta base, e[5]/e[6]=parent revisions.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base can be any revision; classify it.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is either itself or the previous
            # revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered in the order their bases first appear.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard the ratios against zero-length revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the whole chain to measure how much
            # disk data would actually be touched.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
931 931
932 932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        # ent is (state, mode, size, mtime); mtime == -1 means 'unset'.
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit recorded in the mode field
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
976 976
977 977
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real peer resolved through the [paths] configuration
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: fake a remote peer by filtering the local
        # repository down to the ancestors of the requested revisions
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same filtering trick applied to the local side
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture anything written to the ui during
        # discovery and expose it as a data field instead of printing it

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # sanity check: common and missing partition the whole repository
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1229 1231
1230 1232
1231 1233 _chunksize = 4 << 10
1232 1234
1233 1235
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's own URL handling (proxy,
    auth, etc. from the configuration) and streamed in _chunksize pieces
    either to the ui or, with --output, to the given file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # release the network handle in every case; the original code leaked
        # it. Only close dest when we opened a file ourselves — the ui
        # object must stay usable.
        fh.close()
        if output:
            dest.close()
1256 1258
1257 1259
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized builds have no per-module
        # __file__, so fall back to the executable path there
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # quiet/verbose mode: plain name on its own line; default mode
        # appends a compatibility note to the same line
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1319 1321
1320 1322
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the parsing pipeline; each named stage may be dumped via --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted in the legacy --verbose-only mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1416 1418
1417 1419
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # first column width: the widest variant name, but never narrower than
    # the b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded with spaces so all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes pass through as-is; truthy/falsy values become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default values
        # can be highlighted by the color extension
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1488 1490
1489 1491
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command always has
        return flag and b'yes' or b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; failure to create one
    # (e.g. read-only path) leaves the answer unknown
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1512 1514
1513 1515
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name onto an internal bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1560 1562
1561 1563
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # test the file itself first, then walk up looking for an
                # ignored containing directory
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1610 1612
1611 1613
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    shortfn = hex if ui.debugflag else short

    # figure out the rendered node-id width from the first revision, if any
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    header = b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1651 1653
1652 1654
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # one edge per parent; the null second parent is not an edge
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1671 1673
1672 1674
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): shortest() is presumably called for its side effect of
    # exercising/loading the native index — confirm before removing
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1682 1684
1683 1685
1684 1686 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1685 1687 def debuginstall(ui, **opts):
1686 1688 """test Mercurial installation
1687 1689
1688 1690 Returns 0 on success.
1689 1691 """
1690 1692 opts = pycompat.byteskwargs(opts)
1691 1693
1692 1694 problems = 0
1693 1695
1694 1696 fm = ui.formatter(b'debuginstall', opts)
1695 1697 fm.startitem()
1696 1698
1697 1699 # encoding might be unknown or wrong. don't translate these messages.
1698 1700 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1699 1701 err = None
1700 1702 try:
1701 1703 codecs.lookup(pycompat.sysstr(encoding.encoding))
1702 1704 except LookupError as inst:
1703 1705 err = stringutil.forcebytestr(inst)
1704 1706 problems += 1
1705 1707 fm.condwrite(
1706 1708 err,
1707 1709 b'encodingerror',
1708 1710 b" %s\n (check that your locale is properly set)\n",
1709 1711 err,
1710 1712 )
1711 1713
1712 1714 # Python
1713 1715 pythonlib = None
1714 1716 if util.safehasattr(os, '__file__'):
1715 1717 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1716 1718 elif getattr(sys, 'oxidized', False):
1717 1719 pythonlib = pycompat.sysexecutable
1718 1720
1719 1721 fm.write(
1720 1722 b'pythonexe',
1721 1723 _(b"checking Python executable (%s)\n"),
1722 1724 pycompat.sysexecutable or _(b"unknown"),
1723 1725 )
1724 1726 fm.write(
1725 1727 b'pythonimplementation',
1726 1728 _(b"checking Python implementation (%s)\n"),
1727 1729 pycompat.sysbytes(platform.python_implementation()),
1728 1730 )
1729 1731 fm.write(
1730 1732 b'pythonver',
1731 1733 _(b"checking Python version (%s)\n"),
1732 1734 (b"%d.%d.%d" % sys.version_info[:3]),
1733 1735 )
1734 1736 fm.write(
1735 1737 b'pythonlib',
1736 1738 _(b"checking Python lib (%s)...\n"),
1737 1739 pythonlib or _(b"unknown"),
1738 1740 )
1739 1741
1740 1742 try:
1741 1743 from . import rustext # pytype: disable=import-error
1742 1744
1743 1745 rustext.__doc__ # trigger lazy import
1744 1746 except ImportError:
1745 1747 rustext = None
1746 1748
1747 1749 security = set(sslutil.supportedprotocols)
1748 1750 if sslutil.hassni:
1749 1751 security.add(b'sni')
1750 1752
1751 1753 fm.write(
1752 1754 b'pythonsecurity',
1753 1755 _(b"checking Python security support (%s)\n"),
1754 1756 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1755 1757 )
1756 1758
1757 1759 # These are warnings, not errors. So don't increment problem count. This
1758 1760 # may change in the future.
1759 1761 if b'tls1.2' not in security:
1760 1762 fm.plain(
1761 1763 _(
1762 1764 b' TLS 1.2 not supported by Python install; '
1763 1765 b'network connections lack modern security\n'
1764 1766 )
1765 1767 )
1766 1768 if b'sni' not in security:
1767 1769 fm.plain(
1768 1770 _(
1769 1771 b' SNI not supported by Python install; may have '
1770 1772 b'connectivity issues with some servers\n'
1771 1773 )
1772 1774 )
1773 1775
1774 1776 fm.plain(
1775 1777 _(
1776 1778 b"checking Rust extensions (%s)\n"
1777 1779 % (b'missing' if rustext is None else b'installed')
1778 1780 ),
1779 1781 )
1780 1782
1781 1783 # TODO print CA cert info
1782 1784
1783 1785 # hg version
1784 1786 hgver = util.version()
1785 1787 fm.write(
1786 1788 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1787 1789 )
1788 1790 fm.write(
1789 1791 b'hgverextra',
1790 1792 _(b"checking Mercurial custom build (%s)\n"),
1791 1793 b'+'.join(hgver.split(b'+')[1:]),
1792 1794 )
1793 1795
1794 1796 # compiled modules
1795 1797 hgmodules = None
1796 1798 if util.safehasattr(sys.modules[__name__], '__file__'):
1797 1799 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1798 1800 elif getattr(sys, 'oxidized', False):
1799 1801 hgmodules = pycompat.sysexecutable
1800 1802
1801 1803 fm.write(
1802 1804 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1803 1805 )
1804 1806 fm.write(
1805 1807 b'hgmodules',
1806 1808 _(b"checking installed modules (%s)...\n"),
1807 1809 hgmodules or _(b"unknown"),
1808 1810 )
1809 1811
1810 1812 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1811 1813 rustext = rustandc # for now, that's the only case
1812 1814 cext = policy.policy in (b'c', b'allow') or rustandc
1813 1815 nopure = cext or rustext
1814 1816 if nopure:
1815 1817 err = None
1816 1818 try:
1817 1819 if cext:
1818 1820 from .cext import ( # pytype: disable=import-error
1819 1821 base85,
1820 1822 bdiff,
1821 1823 mpatch,
1822 1824 osutil,
1823 1825 )
1824 1826
1825 1827 # quiet pyflakes
1826 1828 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1827 1829 if rustext:
1828 1830 from .rustext import ( # pytype: disable=import-error
1829 1831 ancestor,
1830 1832 dirstate,
1831 1833 )
1832 1834
1833 1835 dir(ancestor), dir(dirstate) # quiet pyflakes
1834 1836 except Exception as inst:
1835 1837 err = stringutil.forcebytestr(inst)
1836 1838 problems += 1
1837 1839 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1838 1840
1839 1841 compengines = util.compengines._engines.values()
1840 1842 fm.write(
1841 1843 b'compengines',
1842 1844 _(b'checking registered compression engines (%s)\n'),
1843 1845 fm.formatlist(
1844 1846 sorted(e.name() for e in compengines),
1845 1847 name=b'compengine',
1846 1848 fmt=b'%s',
1847 1849 sep=b', ',
1848 1850 ),
1849 1851 )
1850 1852 fm.write(
1851 1853 b'compenginesavail',
1852 1854 _(b'checking available compression engines (%s)\n'),
1853 1855 fm.formatlist(
1854 1856 sorted(e.name() for e in compengines if e.available()),
1855 1857 name=b'compengine',
1856 1858 fmt=b'%s',
1857 1859 sep=b', ',
1858 1860 ),
1859 1861 )
1860 1862 wirecompengines = compression.compengines.supportedwireengines(
1861 1863 compression.SERVERROLE
1862 1864 )
1863 1865 fm.write(
1864 1866 b'compenginesserver',
1865 1867 _(
1866 1868 b'checking available compression engines '
1867 1869 b'for wire protocol (%s)\n'
1868 1870 ),
1869 1871 fm.formatlist(
1870 1872 [e.name() for e in wirecompengines if e.wireprotosupport()],
1871 1873 name=b'compengine',
1872 1874 fmt=b'%s',
1873 1875 sep=b', ',
1874 1876 ),
1875 1877 )
1876 1878 re2 = b'missing'
1877 1879 if util._re2:
1878 1880 re2 = b'available'
1879 1881 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1880 1882 fm.data(re2=bool(util._re2))
1881 1883
1882 1884 # templates
1883 1885 p = templater.templatedir()
1884 1886 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1885 1887 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1886 1888 if p:
1887 1889 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1888 1890 if m:
1889 1891 # template found, check if it is working
1890 1892 err = None
1891 1893 try:
1892 1894 templater.templater.frommapfile(m)
1893 1895 except Exception as inst:
1894 1896 err = stringutil.forcebytestr(inst)
1895 1897 p = None
1896 1898 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1897 1899 else:
1898 1900 p = None
1899 1901 fm.condwrite(
1900 1902 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1901 1903 )
1902 1904 fm.condwrite(
1903 1905 not m,
1904 1906 b'defaulttemplatenotfound',
1905 1907 _(b" template '%s' not found\n"),
1906 1908 b"default",
1907 1909 )
1908 1910 if not p:
1909 1911 problems += 1
1910 1912 fm.condwrite(
1911 1913 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1912 1914 )
1913 1915
1914 1916 # editor
1915 1917 editor = ui.geteditor()
1916 1918 editor = util.expandpath(editor)
1917 1919 editorbin = procutil.shellsplit(editor)[0]
1918 1920 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1919 1921 cmdpath = procutil.findexe(editorbin)
1920 1922 fm.condwrite(
1921 1923 not cmdpath and editor == b'vi',
1922 1924 b'vinotfound',
1923 1925 _(
1924 1926 b" No commit editor set and can't find %s in PATH\n"
1925 1927 b" (specify a commit editor in your configuration"
1926 1928 b" file)\n"
1927 1929 ),
1928 1930 not cmdpath and editor == b'vi' and editorbin,
1929 1931 )
1930 1932 fm.condwrite(
1931 1933 not cmdpath and editor != b'vi',
1932 1934 b'editornotfound',
1933 1935 _(
1934 1936 b" Can't find editor '%s' in PATH\n"
1935 1937 b" (specify a commit editor in your configuration"
1936 1938 b" file)\n"
1937 1939 ),
1938 1940 not cmdpath and editorbin,
1939 1941 )
1940 1942 if not cmdpath and editor != b'vi':
1941 1943 problems += 1
1942 1944
1943 1945 # check username
1944 1946 username = None
1945 1947 err = None
1946 1948 try:
1947 1949 username = ui.username()
1948 1950 except error.Abort as e:
1949 1951 err = e.message
1950 1952 problems += 1
1951 1953
1952 1954 fm.condwrite(
1953 1955 username, b'username', _(b"checking username (%s)\n"), username
1954 1956 )
1955 1957 fm.condwrite(
1956 1958 err,
1957 1959 b'usernameerror',
1958 1960 _(
1959 1961 b"checking username...\n %s\n"
1960 1962 b" (specify a username in your configuration file)\n"
1961 1963 ),
1962 1964 err,
1963 1965 )
1964 1966
1965 1967 for name, mod in extensions.extensions():
1966 1968 handler = getattr(mod, 'debuginstall', None)
1967 1969 if handler is not None:
1968 1970 problems += handler(ui, fm)
1969 1971
1970 1972 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1971 1973 if not problems:
1972 1974 fm.data(problems=problems)
1973 1975 fm.condwrite(
1974 1976 problems,
1975 1977 b'problems',
1976 1978 _(b"%d problems detected, please check your install!\n"),
1977 1979 problems,
1978 1980 )
1979 1981 fm.end()
1980 1982
1981 1983 return problems
1982 1984
1983 1985
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Connect to the (possibly remote) repository and query the 'known'
    # wire-protocol command with the binary form of every supplied id.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    results = peer.known([bin(node_id) for node_id in ids])
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in results))
1997 1999
1998 2000
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so ancient shell-completion scripts that still
    # invoke 'debuglabelcomplete' continue to work; the real implementation
    # is debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2003 2005
2004 2006
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # "Force-freeing" a lock is simply deleting its file; no check is made
    # that the lock is actually stale, hence the DANGEROUS flag above.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # Acquire the requested lock(s) non-blockingly (wait=False), so an
        # already-held lock aborts immediately instead of hanging.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers the prompt (or the
            # process is interrupted); the finally clause then releases.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Reporting mode: no set/free option was given.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock, so nobody else holds it: release it
            # right away and report "free" below.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    # Only show the host when the lock was taken elsewhere.
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquisition and our stat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2116 2118
2117 2119
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Reach into the root manifest storage for its fulltext cache;
        # abort cleanly when the active revlog implementation has none.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        # Populate the cache by reading each requested manifest node.
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # Default mode: dump the cache contents, most recently used first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2191 2193
2192 2194
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; a custom -T template sees the
        # same keywords (commits, files, extras, ...).
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits involved in the merge (local/other), with optional
    # conflict-marker labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields present depend on the record type
    # (content conflict vs. path/rename conflict).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2300 2302
2301 2303
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # The 'branches' namespace historically listed only open branches, so
    # it is handled specially below instead of via the generic walk.
    for ns_name, ns in pycompat.iteritems(repo.names):
        if ns_name != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    if not args:
        args = [b'']
    matches = set()
    for prefix in args:
        matches.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2324 2326
2325 2327
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Regenerate the binary nodemap from the index and write it to
        # stdout (preferring the index's native serializer when present).
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the persisted on-disk nodemap bytes as-is.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the on-disk data against the live index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2387 2389
2388 2390
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full 40-hex-digit node id; aborts with InputError on
        # anything shorter/longer or non-hex.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: delete markers by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: create a marker (precursor given on the command line).
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction management: the transaction must be
        # released (and the lock after it) whether creation succeeds or not.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Mode 3: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2538 2540
2539 2541
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # default=None: fall back to the working directory context when no
    # --rev is supplied.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copy_map = ctx.p1copies()
    for dest, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2552 2554
2553 2555
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Fix: this function was previously (mis)named debugp1copies, which
    # shadowed the real debugp1copies defined just above at module level.
    # The command table was unaffected (the decorator captured the function
    # object at definition time), but anything referencing the module
    # attribute -- e.g. an extension wrapping debugp1copies -- silently got
    # this p2 variant instead. The registered command name b'debugp2copies'
    # is unchanged.
    opts = pycompat.byteskwargs(opts)
    # default=None: use the working directory context when no --rev given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2566 2568
2567 2569
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec, keeping only
        # dirstate entries whose status char is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs outside the repository can never match anything.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; translate OS separators so the
        # prefix match below works on Windows too.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator: report
                # the directory prefix instead of the whole file path.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate status characters from the
    # -n/-a/-r flags; empty means "all of n, m, a, r" (see call below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2636 2638
2637 2639
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then report every copy/rename detected
    # between them, restricted by the optional file patterns.
    base = scmutil.revsingle(repo, rev1)
    target = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(base, pats, opts)
    copy_map = copies.pathcopies(base, target, matcher)
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2651 2653
2652 2654
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is unconditionally enabled here; its output
    # only shows up when the user also passes --debug.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        # Query everything before writing, so a failing capability probe
        # aborts cleanly without partial output.
        is_local = peer.local() is not None
        pushable = peer.canpush()
        yes, no = _(b'yes'), _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if is_local else no))
        ui.write(_(b'pushable: %s\n') % (yes if pushable else no))
    finally:
        peer.close()
2676 2678
2677 2679
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool wins over everything else; mirror it into ui.forcemerge
        # so _picktool sees it, and note it in verbose mode.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, buffer (and discard) the chatter that
                # _picktool emits while matching merge-patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2765 2767
2766 2768
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(
                pycompat.iteritems(peer.listkeys(namespace))
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return
        # Update mode: compare-and-swap the key from old to new.
        key, old, new = keyinfo
        with peer.commandexecutor() as executor:
            result = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        return not result
    finally:
        peer.close()
2802 2804
2803 2805
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the position vectors (pvecs) of two revisions and print
    # their relation plus depth/distance metrics.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the four comparisons above holds, 'rel' is
    # left unbound and the write below would raise NameError. Presumably
    # the pvec operators are exhaustive for any pair, but confirm against
    # the pvec module.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2830 2832
2831 2833
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # Restrict the rebuild to the symmetric difference between the
            # manifest and the dirstate, minus files added in the dirstate.
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2879 2881
2880 2882
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the actual work lives in the repair module.
    repair.rebuildfncache(ui, repo)
2885 2887
2886 2888
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(matcher):
        fctx = ctx[abs]
        # renamed() yields (source path, source filenode) or a false value.
        origin = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if not origin:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n") % (rel, origin[0], hex(origin[1]))
            )
2906 2908
2907 2909
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in deterministic (sorted) order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2913 2915
2914 2916
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With --dump, print one raw line of index data per revision and exit.
    Otherwise, walk every revision once, gathering statistics about merges,
    delta types, snapshot depths, chain lengths and chunk compression types,
    then print a formatted report.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: raw per-revision index data, then return immediately.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the set of current heads incrementally: a revision
            # stops being a head as soon as it appears as a parent.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version word: low 16 bits are the format number,
    # the rest are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each entry is a [min, max, total] triple
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into a [min, max, total] accumulator, in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta parent: stored as a full snapshot (depth 0) or empty.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta-encoded: extend the delta chain of the base revision.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Classify what the delta base is relative to this revision.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the chunk identifies its compression engine.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Turn totals into averages (slot [2] of each triple becomes the mean).
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: columns are sized to the largest value printed.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal column wide enough for `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Decimal column plus a trailing percentage, optionally padded.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total); 100% when total is zero/falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-readable label for a chunk-type byte.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            # Parent-based deltas only exist with generaldelta enabled.
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3270 3272
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported (-f 0, the default, and -f 1); each has
    a wider variant selected by --verbose. With --debug, full hex node ids
    are shown instead of short ones.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure the first node id to size the node columns uniformly.
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers, matching the row formats written below.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents when lookup fails (e.g. on a
                # damaged index) so the dump can continue.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3384 3386
3385 3387
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered pipeline of (stage name, transform) applied to the parse tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages printed unconditionally vs. only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping each intermediate tree for later comparison.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision lists; exit 1 when they disagree.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-style listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the matching revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3517 3519
3518 3520
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3567 3569
3568 3570
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # The second parent defaults to the null revision when REV2 is omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3596 3598
3597 3599
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the first positional argument is the revision, not a
    # file path, so shift the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix copy-pasted command name (was b'debugdata') so usage
            # errors reference the command the user actually ran.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap filelog-style objects down to the underlying revlog.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3624 3626
3625 3627
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # Without an explicit SOURCE, fall back to the repo's 'default' path.
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.parseurl(ui.expandpath(source))
    url = urlutil.url(source)

    # Only schemes with a well-known default port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Validation is deliberately disabled (CERT_NONE): we only want the raw
    # peer certificate, which is then checked via the Windows API below.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only trigger Windows Update
        # (build=True, the default) when the chain is incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3695 3697
3696 3698
3697 3699 @command(
3698 3700 b"debugbackupbundle",
3699 3701 [
3700 3702 (
3701 3703 b"",
3702 3704 b"recover",
3703 3705 b"",
3704 3706 b"brings the specified changeset back into the repository",
3705 3707 )
3706 3708 ]
3707 3709 + cmdutil.logopts,
3708 3710 _(b"hg debugbackupbundle [--recover HASH]"),
3709 3711 )
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle, newest first, so --recover scans the
    # most recent backups before older ones.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize incoming-style options that getremotechanges() consults but
    # that make no sense for local backup bundles.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from the bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do when the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = urlutil.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision the local repo no
            # longer has; report it and try the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # getremotechanges() is chatty; silence it but restore the previous
        # quiet level even if it raises.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: unbundle the first backup that contains the
                # requested changeset, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header is the bundle's mtime; --verbose adds
                # the bundle path, otherwise show one line per changeset.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always drop the temporary bundlerepo state for this backup.
            cleanupfn()
3832 3834
3833 3835
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the requested revision (working-directory parent when rev is
    # None) and print each subrepo path with its source and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3845 3847
3846 3848
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter's namespace with the ui and (possibly None) repo.
    local_ns = dict(ui=ui, repo=repo)
    code.interact(local=local_ns)
3862 3864
3863 3865
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # One cache shared across successorssets() calls so repeated obsolescence
    # walks are amortized over all requested revisions.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each successors set goes on its own indented line; an empty
            # set (pruned changeset) produces a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3918 3920
3919 3921
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    # Walk every revision and report its cached .hgtags filenode without
    # computing entries that are absent from the cache.
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            shown = hex(fnode)
            # A cached filenode the .hgtags filelog does not know about
            # points at a stale or corrupt cache entry.
            if not flog.hasnode(fnode):
                shown += b' (unknown node)'
        elif fnode is None:
            shown = b'missing'
        else:
            shown = b'invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), shown))
3938 3940
3939 3941
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    # --rev needs a repository; the command itself is optionalrepo.
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # 'ui' is reserved and an empty key is invalid.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    # With --verbose, show the parsed template tree and, if template
    # aliases changed it, the expanded tree as well.
    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the defined properties only.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4003 4005
4004 4006
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # Substitute a placeholder when the ui implementation yields no answer.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4019 4021
4020 4022
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever the prompt returns, for testing ui.prompt wiring.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4033 4035
4034 4036
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Cache warming may rewrite cache files, so hold both the working-copy
    # lock and the store lock for the full refresh.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
4040 4042
4041 4043
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin wrapper: all planning/execution lives in upgrade.upgraderepo().
    # Duplicate --optimize values are collapsed by the set() conversion.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4091 4093
4092 4094
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    # Fixes over the previous version: the local `abs` shadowed the builtin,
    # lambdas were assigned to a name (PEP 8 E731), max() was fed throwaway
    # list literals, and repo.pathto() was computed twice per file.
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Compute working-directory-relative paths once, reused for both the
    # column-width pass and the output pass.
    relpaths = [repo.pathto(fname) for fname in items]
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:

        def displaypath(fn):
            return fn

    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(rel) for rel in relpaths),
    )
    for fname, rel in zip(items, relpaths):
        line = fmt % (
            fname,
            displaypath(rel),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4119 4121
4120 4122
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    # One line per recorded instability: its kind, any divergent nodes with
    # their phases, the reason, and the offending node.
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            described = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(described) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4138 4140
4139 4141
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Open a peer for the given path and echo arguments through the wire
    # protocol's debugwireargs command, closing the peer when done.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The generic remote options are not forwarded as wire arguments.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4170 4172
4171 4173
4172 4174 def _parsewirelangblocks(fh):
4173 4175 activeaction = None
4174 4176 blocklines = []
4175 4177 lastindent = 0
4176 4178
4177 4179 for line in fh:
4178 4180 line = line.rstrip()
4179 4181 if not line:
4180 4182 continue
4181 4183
4182 4184 if line.startswith(b'#'):
4183 4185 continue
4184 4186
4185 4187 if not line.startswith(b' '):
4186 4188 # New block. Flush previous one.
4187 4189 if activeaction:
4188 4190 yield activeaction, blocklines
4189 4191
4190 4192 activeaction = line
4191 4193 blocklines = []
4192 4194 lastindent = 0
4193 4195 continue
4194 4196
4195 4197 # Else we start with an indent.
4196 4198
4197 4199 if not activeaction:
4198 4200 raise error.Abort(_(b'indented line outside of block'))
4199 4201
4200 4202 indent = len(line) - len(line.lstrip())
4201 4203
4202 4204 # If this line is indented more than the last line, concatenate it.
4203 4205 if indent > lastindent and blocklines:
4204 4206 blocklines[-1] += line.lstrip()
4205 4207 else:
4206 4208 blocklines.append(line)
4207 4209 lastindent = indent
4208 4210
4209 4211 # Flush last block.
4210 4212 if activeaction:
4211 4213 yield activeaction, blocklines
4212 4214
4213 4215
4214 4216 @command(
4215 4217 b'debugwireproto',
4216 4218 [
4217 4219 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4218 4220 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4219 4221 (
4220 4222 b'',
4221 4223 b'noreadstderr',
4222 4224 False,
4223 4225 _(b'do not read from stderr of the remote'),
4224 4226 ),
4225 4227 (
4226 4228 b'',
4227 4229 b'nologhandshake',
4228 4230 False,
4229 4231 _(b'do not log I/O related to the peer handshake'),
4230 4232 ),
4231 4233 ]
4232 4234 + cmdutil.remoteopts,
4233 4235 _(b'[PATH]'),
4234 4236 optionalrepo=True,
4235 4237 )
4236 4238 def debugwireproto(ui, repo, path=None, **opts):
4237 4239 """send wire protocol commands to a server
4238 4240
4239 4241 This command can be used to issue wire protocol commands to remote
4240 4242 peers and to debug the raw data being exchanged.
4241 4243
4242 4244 ``--localssh`` will start an SSH server against the current repository
4243 4245 and connect to that. By default, the connection will perform a handshake
4244 4246 and establish an appropriate peer instance.
4245 4247
4246 4248 ``--peer`` can be used to bypass the handshake protocol and construct a
4247 4249 peer instance using the specified class type. Valid values are ``raw``,
4248 4250 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4249 4251 raw data payloads and don't support higher-level command actions.
4250 4252
4251 4253 ``--noreadstderr`` can be used to disable automatic reading from stderr
4252 4254 of the peer (for SSH connections only). Disabling automatic reading of
4253 4255 stderr is useful for making output more deterministic.
4254 4256
4255 4257 Commands are issued via a mini language which is specified via stdin.
4256 4258 The language consists of individual actions to perform. An action is
4257 4259 defined by a block. A block is defined as a line with no leading
4258 4260 space followed by 0 or more lines with leading space. Blocks are
4259 4261 effectively a high-level command with additional metadata.
4260 4262
4261 4263 Lines beginning with ``#`` are ignored.
4262 4264
4263 4265 The following sections denote available actions.
4264 4266
4265 4267 raw
4266 4268 ---
4267 4269
4268 4270 Send raw data to the server.
4269 4271
4270 4272 The block payload contains the raw data to send as one atomic send
4271 4273 operation. The data may not actually be delivered in a single system
4272 4274 call: it depends on the abilities of the transport being used.
4273 4275
4274 4276 Each line in the block is de-indented and concatenated. Then, that
4275 4277 value is evaluated as a Python b'' literal. This allows the use of
4276 4278 backslash escaping, etc.
4277 4279
4278 4280 raw+
4279 4281 ----
4280 4282
4281 4283 Behaves like ``raw`` except flushes output afterwards.
4282 4284
4283 4285 command <X>
4284 4286 -----------
4285 4287
4286 4288 Send a request to run a named command, whose name follows the ``command``
4287 4289 string.
4288 4290
4289 4291 Arguments to the command are defined as lines in this block. The format of
4290 4292 each line is ``<key> <value>``. e.g.::
4291 4293
4292 4294 command listkeys
4293 4295 namespace bookmarks
4294 4296
4295 4297 If the value begins with ``eval:``, it will be interpreted as a Python
4296 4298 literal expression. Otherwise values are interpreted as Python b'' literals.
4297 4299 This allows sending complex types and encoding special byte sequences via
4298 4300 backslash escaping.
4299 4301
4300 4302 The following arguments have special meaning:
4301 4303
4302 4304 ``PUSHFILE``
4303 4305 When defined, the *push* mechanism of the peer will be used instead
4304 4306 of the static request-response mechanism and the content of the
4305 4307 file specified in the value of this argument will be sent as the
4306 4308 command payload.
4307 4309
4308 4310 This can be used to submit a local bundle file to the remote.
4309 4311
4310 4312 batchbegin
4311 4313 ----------
4312 4314
4313 4315 Instruct the peer to begin a batched send.
4314 4316
4315 4317 All ``command`` blocks are queued for execution until the next
4316 4318 ``batchsubmit`` block.
4317 4319
4318 4320 batchsubmit
4319 4321 -----------
4320 4322
4321 4323 Submit previously queued ``command`` blocks as a batch request.
4322 4324
4323 4325 This action MUST be paired with a ``batchbegin`` action.
4324 4326
4325 4327 httprequest <method> <path>
4326 4328 ---------------------------
4327 4329
4328 4330 (HTTP peer only)
4329 4331
4330 4332 Send an HTTP request to the peer.
4331 4333
4332 4334 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4333 4335
4334 4336 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4335 4337 headers to add to the request. e.g. ``Accept: foo``.
4336 4338
4337 4339 The following arguments are special:
4338 4340
4339 4341 ``BODYFILE``
4340 4342 The content of the file defined as the value to this argument will be
4341 4343 transferred verbatim as the HTTP request body.
4342 4344
4343 4345 ``frame <type> <flags> <payload>``
4344 4346 Send a unified protocol frame as part of the request body.
4345 4347
4346 4348 All frames will be collected and sent as the body to the HTTP
4347 4349 request.
4348 4350
4349 4351 close
4350 4352 -----
4351 4353
4352 4354 Close the connection to the server.
4353 4355
4354 4356 flush
4355 4357 -----
4356 4358
4357 4359 Flush data written to the server.
4358 4360
4359 4361 readavailable
4360 4362 -------------
4361 4363
4362 4364 Close the write end of the connection and read all available data from
4363 4365 the server.
4364 4366
4365 4367 If the connection to the server encompasses multiple pipes, we poll both
4366 4368 pipes and read available data.
4367 4369
4368 4370 readline
4369 4371 --------
4370 4372
4371 4373 Read a line of output from the server. If there are multiple output
4372 4374 pipes, reads only the main pipe.
4373 4375
4374 4376 ereadline
4375 4377 ---------
4376 4378
4377 4379 Like ``readline``, but read from the stderr pipe, if available.
4378 4380
4379 4381 read <X>
4380 4382 --------
4381 4383
4382 4384 ``read()`` N bytes from the server's main output pipe.
4383 4385
4384 4386 eread <X>
4385 4387 ---------
4386 4388
4387 4389 ``read()`` N bytes from the server's stderr pipe, if available.
4388 4390
4389 4391 Specifying Unified Frame-Based Protocol Frames
4390 4392 ----------------------------------------------
4391 4393
4392 4394 It is possible to emit a *Unified Frame-Based Protocol* by using special
4393 4395 syntax.
4394 4396
4395 4397 A frame is composed as a type, flags, and payload. These can be parsed
4396 4398 from a string of the form:
4397 4399
4398 4400 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4399 4401
4400 4402 ``request-id`` and ``stream-id`` are integers defining the request and
4401 4403 stream identifiers.
4402 4404
4403 4405 ``type`` can be an integer value for the frame type or the string name
4404 4406 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4405 4407 ``command-name``.
4406 4408
4407 4409 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4408 4410 components. Each component (and there can be just one) can be an integer
4409 4411 or a flag name for stream flags or frame flags, respectively. Values are
4410 4412 resolved to integers and then bitwise OR'd together.
4411 4413
4412 4414 ``payload`` represents the raw frame payload. If it begins with
4413 4415 ``cbor:``, the following string is evaluated as Python code and the
4414 4416 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4415 4417 as a Python byte string literal.
4416 4418 """
4417 4419 opts = pycompat.byteskwargs(opts)
4418 4420
4419 4421 if opts[b'localssh'] and not repo:
4420 4422 raise error.Abort(_(b'--localssh requires a repository'))
4421 4423
4422 4424 if opts[b'peer'] and opts[b'peer'] not in (
4423 4425 b'raw',
4424 4426 b'http2',
4425 4427 b'ssh1',
4426 4428 b'ssh2',
4427 4429 ):
4428 4430 raise error.Abort(
4429 4431 _(b'invalid value for --peer'),
4430 4432 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4431 4433 )
4432 4434
4433 4435 if path and opts[b'localssh']:
4434 4436 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4435 4437
4436 4438 if ui.interactive():
4437 4439 ui.write(_(b'(waiting for commands on stdin)\n'))
4438 4440
4439 4441 blocks = list(_parsewirelangblocks(ui.fin))
4440 4442
4441 4443 proc = None
4442 4444 stdin = None
4443 4445 stdout = None
4444 4446 stderr = None
4445 4447 opener = None
4446 4448
4447 4449 if opts[b'localssh']:
4448 4450 # We start the SSH server in its own process so there is process
4449 4451 # separation. This prevents a whole class of potential bugs around
4450 4452 # shared state from interfering with server operation.
4451 4453 args = procutil.hgcmd() + [
4452 4454 b'-R',
4453 4455 repo.root,
4454 4456 b'debugserve',
4455 4457 b'--sshstdio',
4456 4458 ]
4457 4459 proc = subprocess.Popen(
4458 4460 pycompat.rapply(procutil.tonativestr, args),
4459 4461 stdin=subprocess.PIPE,
4460 4462 stdout=subprocess.PIPE,
4461 4463 stderr=subprocess.PIPE,
4462 4464 bufsize=0,
4463 4465 )
4464 4466
4465 4467 stdin = proc.stdin
4466 4468 stdout = proc.stdout
4467 4469 stderr = proc.stderr
4468 4470
4469 4471 # We turn the pipes into observers so we can log I/O.
4470 4472 if ui.verbose or opts[b'peer'] == b'raw':
4471 4473 stdin = util.makeloggingfileobject(
4472 4474 ui, proc.stdin, b'i', logdata=True
4473 4475 )
4474 4476 stdout = util.makeloggingfileobject(
4475 4477 ui, proc.stdout, b'o', logdata=True
4476 4478 )
4477 4479 stderr = util.makeloggingfileobject(
4478 4480 ui, proc.stderr, b'e', logdata=True
4479 4481 )
4480 4482
4481 4483 # --localssh also implies the peer connection settings.
4482 4484
4483 4485 url = b'ssh://localserver'
4484 4486 autoreadstderr = not opts[b'noreadstderr']
4485 4487
4486 4488 if opts[b'peer'] == b'ssh1':
4487 4489 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4488 4490 peer = sshpeer.sshv1peer(
4489 4491 ui,
4490 4492 url,
4491 4493 proc,
4492 4494 stdin,
4493 4495 stdout,
4494 4496 stderr,
4495 4497 None,
4496 4498 autoreadstderr=autoreadstderr,
4497 4499 )
4498 4500 elif opts[b'peer'] == b'ssh2':
4499 4501 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4500 4502 peer = sshpeer.sshv2peer(
4501 4503 ui,
4502 4504 url,
4503 4505 proc,
4504 4506 stdin,
4505 4507 stdout,
4506 4508 stderr,
4507 4509 None,
4508 4510 autoreadstderr=autoreadstderr,
4509 4511 )
4510 4512 elif opts[b'peer'] == b'raw':
4511 4513 ui.write(_(b'using raw connection to peer\n'))
4512 4514 peer = None
4513 4515 else:
4514 4516 ui.write(_(b'creating ssh peer from handshake results\n'))
4515 4517 peer = sshpeer.makepeer(
4516 4518 ui,
4517 4519 url,
4518 4520 proc,
4519 4521 stdin,
4520 4522 stdout,
4521 4523 stderr,
4522 4524 autoreadstderr=autoreadstderr,
4523 4525 )
4524 4526
4525 4527 elif path:
4526 4528 # We bypass hg.peer() so we can proxy the sockets.
4527 4529 # TODO consider not doing this because we skip
4528 4530 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4529 4531 u = urlutil.url(path)
4530 4532 if u.scheme != b'http':
4531 4533 raise error.Abort(_(b'only http:// paths are currently supported'))
4532 4534
4533 4535 url, authinfo = u.authinfo()
4534 4536 openerargs = {
4535 4537 'useragent': b'Mercurial debugwireproto',
4536 4538 }
4537 4539
4538 4540 # Turn pipes/sockets into observers so we can log I/O.
4539 4541 if ui.verbose:
4540 4542 openerargs.update(
4541 4543 {
4542 4544 'loggingfh': ui,
4543 4545 'loggingname': b's',
4544 4546 'loggingopts': {
4545 4547 'logdata': True,
4546 4548 'logdataapis': False,
4547 4549 },
4548 4550 }
4549 4551 )
4550 4552
4551 4553 if ui.debugflag:
4552 4554 openerargs['loggingopts']['logdataapis'] = True
4553 4555
4554 4556 # Don't send default headers when in raw mode. This allows us to
4555 4557 # bypass most of the behavior of our URL handling code so we can
4556 4558 # have near complete control over what's sent on the wire.
4557 4559 if opts[b'peer'] == b'raw':
4558 4560 openerargs['sendaccept'] = False
4559 4561
4560 4562 opener = urlmod.opener(ui, authinfo, **openerargs)
4561 4563
4562 4564 if opts[b'peer'] == b'http2':
4563 4565 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4564 4566 # We go through makepeer() because we need an API descriptor for
4565 4567 # the peer instance to be useful.
4566 4568 with ui.configoverride(
4567 4569 {(b'experimental', b'httppeer.advertise-v2'): True}
4568 4570 ):
4569 4571 if opts[b'nologhandshake']:
4570 4572 ui.pushbuffer()
4571 4573
4572 4574 peer = httppeer.makepeer(ui, path, opener=opener)
4573 4575
4574 4576 if opts[b'nologhandshake']:
4575 4577 ui.popbuffer()
4576 4578
4577 4579 if not isinstance(peer, httppeer.httpv2peer):
4578 4580 raise error.Abort(
4579 4581 _(
4580 4582 b'could not instantiate HTTP peer for '
4581 4583 b'wire protocol version 2'
4582 4584 ),
4583 4585 hint=_(
4584 4586 b'the server may not have the feature '
4585 4587 b'enabled or is not allowing this '
4586 4588 b'client version'
4587 4589 ),
4588 4590 )
4589 4591
4590 4592 elif opts[b'peer'] == b'raw':
4591 4593 ui.write(_(b'using raw connection to peer\n'))
4592 4594 peer = None
4593 4595 elif opts[b'peer']:
4594 4596 raise error.Abort(
4595 4597 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4596 4598 )
4597 4599 else:
4598 4600 peer = httppeer.makepeer(ui, path, opener=opener)
4599 4601
4600 4602 # We /could/ populate stdin/stdout with sock.makefile()...
4601 4603 else:
4602 4604 raise error.Abort(_(b'unsupported connection configuration'))
4603 4605
4604 4606 batchedcommands = None
4605 4607
4606 4608 # Now perform actions based on the parsed wire language instructions.
# NOTE(review): this span is the tail of a larger command function (its
# `def` line is above this view); it dispatches scripted test "actions"
# against a wire-protocol peer (stdin/stdout/stderr of an ssh subprocess,
# or an HTTP opener).  Each line below carries a pair of diff line numbers
# from the review tool; they are not part of the code itself.
4607 4609 for action, lines in blocks:
# 'raw'/'raw+': write the escaped payload bytes straight to the peer's stdin.
4608 4610 if action in (b'raw', b'raw+'):
4609 4611 if not stdin:
4610 4612 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4611 4613
4612 4614 # Concatenate the data together.
4613 4615 data = b''.join(l.lstrip() for l in lines)
4614 4616 data = stringutil.unescapestr(data)
4615 4617 stdin.write(data)
4616 4618
# 'raw+' additionally flushes after writing.
4617 4619 if action == b'raw+':
4618 4620 stdin.flush()
4619 4621 elif action == b'flush':
4620 4622 if not stdin:
4621 4623 raise error.Abort(_(b'cannot call flush on this peer'))
4622 4624 stdin.flush()
# 'command <name>': send a wire-protocol command.  Following lines are
# "key value" argument pairs (value may be empty; an 'eval:' prefix
# evaluates the remainder as a Python literal, otherwise the value is
# unescaped as bytes).
4623 4625 elif action.startswith(b'command'):
4624 4626 if not peer:
4625 4627 raise error.Abort(
4626 4628 _(
4627 4629 b'cannot send commands unless peer instance '
4628 4630 b'is available'
4629 4631 )
4630 4632 )
4631 4633
4632 4634 command = action.split(b' ', 1)[1]
4633 4635
4634 4636 args = {}
4635 4637 for line in lines:
4636 4638 # We need to allow empty values.
4637 4639 fields = line.lstrip().split(b' ', 1)
4638 4640 if len(fields) == 1:
4639 4641 key = fields[0]
4640 4642 value = b''
4641 4643 else:
4642 4644 key, value = fields
4643 4645
4644 4646 if value.startswith(b'eval:'):
4645 4647 value = stringutil.evalpythonliteral(value[5:])
4646 4648 else:
4647 4649 value = stringutil.unescapestr(value)
4648 4650
4649 4651 args[key] = value
4650 4652
# Inside a batchbegin/batchsubmit pair, queue the command instead of
# sending it immediately.
4651 4653 if batchedcommands is not None:
4652 4654 batchedcommands.append((command, args))
4653 4655 continue
4654 4656
4655 4657 ui.status(_(b'sending %s command\n') % command)
4656 4658
# A PUSHFILE argument streams the named file's contents via the legacy
# _callpush() API; the argument itself is removed before sending.
4657 4659 if b'PUSHFILE' in args:
4658 4660 with open(args[b'PUSHFILE'], 'rb') as fh:
4659 4661 del args[b'PUSHFILE']
4660 4662 res, output = peer._callpush(
4661 4663 command, fh, **pycompat.strkwargs(args)
4662 4664 )
4663 4665 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4664 4666 ui.status(
4665 4667 _(b'remote output: %s\n') % stringutil.escapestr(output)
4666 4668 )
4667 4669 else:
4668 4670 with peer.commandexecutor() as e:
4669 4671 res = e.callcommand(command, args).result()
4670 4672
# wireproto v2 responses are decoded object streams; pretty-print the
# objects for v2, the raw result otherwise.
4671 4673 if isinstance(res, wireprotov2peer.commandresponse):
4672 4674 val = res.objects()
4673 4675 ui.status(
4674 4676 _(b'response: %s\n')
4675 4677 % stringutil.pprint(val, bprefix=True, indent=2)
4676 4678 )
4677 4679 else:
4678 4680 ui.status(
4679 4681 _(b'response: %s\n')
4680 4682 % stringutil.pprint(res, bprefix=True, indent=2)
4681 4683 )
4682 4684
4683 4685 elif action == b'batchbegin':
4684 4686 if batchedcommands is not None:
4685 4687 raise error.Abort(_(b'nested batchbegin not allowed'))
4686 4688
4687 4689 batchedcommands = []
4688 4690 elif action == b'batchsubmit':
4689 4691 # There is a batching API we could go through. But it would be
4690 4692 # difficult to normalize requests into function calls. It is easier
4691 4693 # to bypass this layer and normalize to commands + args.
4692 4694 ui.status(
4693 4695 _(b'sending batch with %d sub-commands\n')
4694 4696 % len(batchedcommands)
4695 4697 )
4696 4698 assert peer is not None
4697 4699 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4698 4700 ui.status(
4699 4701 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4700 4702 )
4701 4703
4702 4704 batchedcommands = None
4703 4705
# 'httprequest <method> <path>': issue a raw HTTP request through the
# peer's urllib opener.  Body lines are headers ("Key: value"), a
# BODYFILE directive, or low-level 'frame ...' specifications.
4704 4706 elif action.startswith(b'httprequest '):
4705 4707 if not opener:
4706 4708 raise error.Abort(
4707 4709 _(b'cannot use httprequest without an HTTP peer')
4708 4710 )
4709 4711
4710 4712 request = action.split(b' ', 2)
4711 4713 if len(request) != 3:
4712 4714 raise error.Abort(
4713 4715 _(
# NOTE(review): the closing '"' appears to be missing from this message.
4714 4716 b'invalid httprequest: expected format is '
4715 4717 b'"httprequest <method> <path>'
4716 4718 )
4717 4719 )
4718 4720
4719 4721 method, httppath = request[1:]
4720 4722 headers = {}
4721 4723 body = None
4722 4724 frames = []
4723 4725 for line in lines:
4724 4726 line = line.lstrip()
4725 4727 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4726 4728 if m:
4727 4729 # Headers need to use native strings.
4728 4730 key = pycompat.strurl(m.group(1))
4729 4731 value = pycompat.strurl(m.group(2))
4730 4732 headers[key] = value
4731 4733 continue
4732 4734
# NOTE(review): line.split(b' ', 1) returns a *list*, so open() is
# handed the whole list rather than the filename -- presumably this
# should be line.split(b' ', 1)[1]; the bytes mode b'rb' also looks
# wrong for the builtin open().  Verify upstream before relying on
# the BODYFILE directive.
4733 4735 if line.startswith(b'BODYFILE '):
4734 4736 with open(line.split(b' ', 1), b'rb') as fh:
4735 4737 body = fh.read()
4736 4738 elif line.startswith(b'frame '):
4737 4739 frame = wireprotoframing.makeframefromhumanstring(
4738 4740 line[len(b'frame ') :]
4739 4741 )
4740 4742
4741 4743 frames.append(frame)
4742 4744 else:
4743 4745 raise error.Abort(
4744 4746 _(b'unknown argument to httprequest: %s') % line
4745 4747 )
4746 4748
4747 4749 url = path + httppath
4748 4750
# Explicit frames take precedence over any body read from BODYFILE.
4749 4751 if frames:
4750 4752 body = b''.join(bytes(f) for f in frames)
4751 4753
4752 4754 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4753 4755
4754 4756 # urllib.Request insists on using has_data() as a proxy for
4755 4757 # determining the request method. Override that to use our
4756 4758 # explicitly requested method.
4757 4759 req.get_method = lambda: pycompat.sysstr(method)
4758 4760
4759 4761 try:
4760 4762 res = opener.open(req)
4761 4763 body = res.read()
4762 4764 except util.urlerr.urlerror as e:
4763 4765 # read() method must be called, but only exists in Python 2
4764 4766 getattr(e, 'read', lambda: None)()
4765 4767 continue
4766 4768
# Only CBOR responses are decoded and displayed; other content types
# are read but silently ignored.
4767 4769 ct = res.headers.get('Content-Type')
4768 4770 if ct == 'application/mercurial-cbor':
4769 4771 ui.write(
4770 4772 _(b'cbor> %s\n')
4771 4773 % stringutil.pprint(
4772 4774 cborutil.decodeall(body), bprefix=True, indent=2
4773 4775 )
4774 4776 )
4775 4777
4776 4778 elif action == b'close':
4777 4779 assert peer is not None
4778 4780 peer.close()
# 'readavailable': close our side of stdin, then drain both subprocess
# pipes.
4779 4781 elif action == b'readavailable':
4780 4782 if not stdout or not stderr:
4781 4783 raise error.Abort(
4782 4784 _(b'readavailable not available on this peer')
4783 4785 )
4784 4786
4785 4787 stdin.close()
4786 4788 stdout.read()
4787 4789 stderr.read()
4788 4790
4789 4791 elif action == b'readline':
4790 4792 if not stdout:
4791 4793 raise error.Abort(_(b'readline not available on this peer'))
4792 4794 stdout.readline()
4793 4795 elif action == b'ereadline':
4794 4796 if not stderr:
4795 4797 raise error.Abort(_(b'ereadline not available on this peer'))
4796 4798 stderr.readline()
# 'read N' / 'eread N': consume exactly N bytes from stdout / stderr.
4797 4799 elif action.startswith(b'read '):
4798 4800 count = int(action.split(b' ', 1)[1])
4799 4801 if not stdout:
4800 4802 raise error.Abort(_(b'read not available on this peer'))
4801 4803 stdout.read(count)
4802 4804 elif action.startswith(b'eread '):
4803 4805 count = int(action.split(b' ', 1)[1])
4804 4806 if not stderr:
4805 4807 raise error.Abort(_(b'eread not available on this peer'))
4806 4808 stderr.read(count)
4807 4809 else:
4808 4810 raise error.Abort(_(b'unknown action: %s') % action)
4809 4811
# Cleanup after the action loop: an unmatched batchbegin is a script
# error; otherwise close the peer and kill any ssh subprocess.
4810 4812 if batchedcommands is not None:
4811 4813 raise error.Abort(_(b'unclosed "batchbegin" request'))
4812 4814
4813 4815 if peer:
4814 4816 peer.close()
4815 4817
4816 4818 if proc:
4817 4819 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now