debuginstall: add entry about re2 Rust bindings when applicable...
Raphaël Gomès
r45019:c9897371 default
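
This changeset extends the re2 section of 'hg debuginstall': when the Rust extensions can be imported, the command additionally reports whether the Rust bindings for the "re2" regexp engine are compiled in. Going by the added hunk below (illustrative only; the wording is taken from the hunk itself), a build with the bindings present would gain one extra output line of the form

    checking "re2" regexp engine Rust bindings (installed)

with "missing" shown instead when the Rust debug module reports re2 as absent; if policy.importrust("debug") returns nothing, the line is not printed at all.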
@@ -1,4503 +1,4510 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 merge as mergemod,
62 62 obsolete,
63 63 obsutil,
64 64 pathutil,
65 65 phases,
66 66 policy,
67 67 pvec,
68 68 pycompat,
69 69 registrar,
70 70 repair,
71 71 revlog,
72 72 revset,
73 73 revsetlang,
74 74 scmutil,
75 75 setdiscovery,
76 76 simplemerge,
77 77 sshpeer,
78 78 sslutil,
79 79 streamclone,
80 80 tags as tagsmod,
81 81 templater,
82 82 treediscovery,
83 83 upgrade,
84 84 url as urlmod,
85 85 util,
86 86 vfs as vfsmod,
87 87 wireprotoframing,
88 88 wireprotoserver,
89 89 wireprotov2peer,
90 90 )
91 91 from .utils import (
92 92 cborutil,
93 93 compression,
94 94 dateutil,
95 95 procutil,
96 96 stringutil,
97 97 )
98 98
99 99 from .revlogutils import (
100 100 deltas as deltautil,
101 101 nodemap,
102 102 )
103 103
104 104 release = lockmod.release
105 105
106 106 command = registrar.command()
107 107
108 108
109 109 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
110 110 def debugancestor(ui, repo, *args):
111 111 """find the ancestor revision of two revisions in a given index"""
112 112 if len(args) == 3:
113 113 index, rev1, rev2 = args
114 114 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
115 115 lookup = r.lookup
116 116 elif len(args) == 2:
117 117 if not repo:
118 118 raise error.Abort(
119 119 _(b'there is no Mercurial repository here (.hg not found)')
120 120 )
121 121 rev1, rev2 = args
122 122 r = repo.changelog
123 123 lookup = repo.lookup
124 124 else:
125 125 raise error.Abort(_(b'either two or three arguments required'))
126 126 a = r.ancestor(lookup(rev1), lookup(rev2))
127 127 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
128 128
129 129
130 130 @command(b'debugapplystreamclonebundle', [], b'FILE')
131 131 def debugapplystreamclonebundle(ui, repo, fname):
132 132 """apply a stream clone bundle file"""
133 133 f = hg.openpath(ui, fname)
134 134 gen = exchange.readbundle(ui, f, fname)
135 135 gen.apply(repo)
136 136
137 137
138 138 @command(
139 139 b'debugbuilddag',
140 140 [
141 141 (
142 142 b'm',
143 143 b'mergeable-file',
144 144 None,
145 145 _(b'add single file mergeable changes'),
146 146 ),
147 147 (
148 148 b'o',
149 149 b'overwritten-file',
150 150 None,
151 151 _(b'add single file all revs overwrite'),
152 152 ),
153 153 (b'n', b'new-file', None, _(b'add new file at each rev')),
154 154 ],
155 155 _(b'[OPTION]... [TEXT]'),
156 156 )
157 157 def debugbuilddag(
158 158 ui,
159 159 repo,
160 160 text=None,
161 161 mergeable_file=False,
162 162 overwritten_file=False,
163 163 new_file=False,
164 164 ):
165 165 """builds a repo with a given DAG from scratch in the current empty repo
166 166
167 167 The description of the DAG is read from stdin if not given on the
168 168 command line.
169 169
170 170 Elements:
171 171
172 172 - "+n" is a linear run of n nodes based on the current default parent
173 173 - "." is a single node based on the current default parent
174 174 - "$" resets the default parent to null (implied at the start);
175 175 otherwise the default parent is always the last node created
176 176 - "<p" sets the default parent to the backref p
177 177 - "*p" is a fork at parent p, which is a backref
178 178 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
179 179 - "/p2" is a merge of the preceding node and p2
180 180 - ":tag" defines a local tag for the preceding node
181 181 - "@branch" sets the named branch for subsequent nodes
182 182 - "#...\\n" is a comment up to the end of the line
183 183
184 184 Whitespace between the above elements is ignored.
185 185
186 186 A backref is either
187 187
188 188 - a number n, which references the node curr-n, where curr is the current
189 189 node, or
190 190 - the name of a local tag you placed earlier using ":tag", or
191 191 - empty to denote the default parent.
192 192
193 193 All string-valued elements are either strictly alphanumeric, or must
194 194 be enclosed in double quotes ("..."), with "\\" as escape character.
195 195 """
196 196
197 197 if text is None:
198 198 ui.status(_(b"reading DAG from stdin\n"))
199 199 text = ui.fin.read()
200 200
201 201 cl = repo.changelog
202 202 if len(cl) > 0:
203 203 raise error.Abort(_(b'repository is not empty'))
204 204
205 205 # determine number of revs in DAG
206 206 total = 0
207 207 for type, data in dagparser.parsedag(text):
208 208 if type == b'n':
209 209 total += 1
210 210
211 211 if mergeable_file:
212 212 linesperrev = 2
213 213 # make a file with k lines per rev
214 214 initialmergedlines = [
215 215 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
216 216 ]
217 217 initialmergedlines.append(b"")
218 218
219 219 tags = []
220 220 progress = ui.makeprogress(
221 221 _(b'building'), unit=_(b'revisions'), total=total
222 222 )
223 223 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
224 224 at = -1
225 225 atbranch = b'default'
226 226 nodeids = []
227 227 id = 0
228 228 progress.update(id)
229 229 for type, data in dagparser.parsedag(text):
230 230 if type == b'n':
231 231 ui.note((b'node %s\n' % pycompat.bytestr(data)))
232 232 id, ps = data
233 233
234 234 files = []
235 235 filecontent = {}
236 236
237 237 p2 = None
238 238 if mergeable_file:
239 239 fn = b"mf"
240 240 p1 = repo[ps[0]]
241 241 if len(ps) > 1:
242 242 p2 = repo[ps[1]]
243 243 pa = p1.ancestor(p2)
244 244 base, local, other = [
245 245 x[fn].data() for x in (pa, p1, p2)
246 246 ]
247 247 m3 = simplemerge.Merge3Text(base, local, other)
248 248 ml = [l.strip() for l in m3.merge_lines()]
249 249 ml.append(b"")
250 250 elif at > 0:
251 251 ml = p1[fn].data().split(b"\n")
252 252 else:
253 253 ml = initialmergedlines
254 254 ml[id * linesperrev] += b" r%i" % id
255 255 mergedtext = b"\n".join(ml)
256 256 files.append(fn)
257 257 filecontent[fn] = mergedtext
258 258
259 259 if overwritten_file:
260 260 fn = b"of"
261 261 files.append(fn)
262 262 filecontent[fn] = b"r%i\n" % id
263 263
264 264 if new_file:
265 265 fn = b"nf%i" % id
266 266 files.append(fn)
267 267 filecontent[fn] = b"r%i\n" % id
268 268 if len(ps) > 1:
269 269 if not p2:
270 270 p2 = repo[ps[1]]
271 271 for fn in p2:
272 272 if fn.startswith(b"nf"):
273 273 files.append(fn)
274 274 filecontent[fn] = p2[fn].data()
275 275
276 276 def fctxfn(repo, cx, path):
277 277 if path in filecontent:
278 278 return context.memfilectx(
279 279 repo, cx, path, filecontent[path]
280 280 )
281 281 return None
282 282
283 283 if len(ps) == 0 or ps[0] < 0:
284 284 pars = [None, None]
285 285 elif len(ps) == 1:
286 286 pars = [nodeids[ps[0]], None]
287 287 else:
288 288 pars = [nodeids[p] for p in ps]
289 289 cx = context.memctx(
290 290 repo,
291 291 pars,
292 292 b"r%i" % id,
293 293 files,
294 294 fctxfn,
295 295 date=(id, 0),
296 296 user=b"debugbuilddag",
297 297 extra={b'branch': atbranch},
298 298 )
299 299 nodeid = repo.commitctx(cx)
300 300 nodeids.append(nodeid)
301 301 at = id
302 302 elif type == b'l':
303 303 id, name = data
304 304 ui.note((b'tag %s\n' % name))
305 305 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
306 306 elif type == b'a':
307 307 ui.note((b'branch %s\n' % data))
308 308 atbranch = data
309 309 progress.update(id)
310 310
311 311 if tags:
312 312 repo.vfs.write(b"localtags", b"".join(tags))
313 313
314 314
315 315 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
316 316 indent_string = b' ' * indent
317 317 if all:
318 318 ui.writenoi18n(
319 319 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
320 320 % indent_string
321 321 )
322 322
323 323 def showchunks(named):
324 324 ui.write(b"\n%s%s\n" % (indent_string, named))
325 325 for deltadata in gen.deltaiter():
326 326 node, p1, p2, cs, deltabase, delta, flags = deltadata
327 327 ui.write(
328 328 b"%s%s %s %s %s %s %d\n"
329 329 % (
330 330 indent_string,
331 331 hex(node),
332 332 hex(p1),
333 333 hex(p2),
334 334 hex(cs),
335 335 hex(deltabase),
336 336 len(delta),
337 337 )
338 338 )
339 339
340 340 gen.changelogheader()
341 341 showchunks(b"changelog")
342 342 gen.manifestheader()
343 343 showchunks(b"manifest")
344 344 for chunkdata in iter(gen.filelogheader, {}):
345 345 fname = chunkdata[b'filename']
346 346 showchunks(fname)
347 347 else:
348 348 if isinstance(gen, bundle2.unbundle20):
349 349 raise error.Abort(_(b'use debugbundle2 for this file'))
350 350 gen.changelogheader()
351 351 for deltadata in gen.deltaiter():
352 352 node, p1, p2, cs, deltabase, delta, flags = deltadata
353 353 ui.write(b"%s%s\n" % (indent_string, hex(node)))
354 354
355 355
356 356 def _debugobsmarkers(ui, part, indent=0, **opts):
357 357 """display version and markers contained in 'data'"""
358 358 opts = pycompat.byteskwargs(opts)
359 359 data = part.read()
360 360 indent_string = b' ' * indent
361 361 try:
362 362 version, markers = obsolete._readmarkers(data)
363 363 except error.UnknownVersion as exc:
364 364 msg = b"%sunsupported version: %s (%d bytes)\n"
365 365 msg %= indent_string, exc.version, len(data)
366 366 ui.write(msg)
367 367 else:
368 368 msg = b"%sversion: %d (%d bytes)\n"
369 369 msg %= indent_string, version, len(data)
370 370 ui.write(msg)
371 371 fm = ui.formatter(b'debugobsolete', opts)
372 372 for rawmarker in sorted(markers):
373 373 m = obsutil.marker(None, rawmarker)
374 374 fm.startitem()
375 375 fm.plain(indent_string)
376 376 cmdutil.showmarker(fm, m)
377 377 fm.end()
378 378
379 379
380 380 def _debugphaseheads(ui, data, indent=0):
381 381 """display the phase heads contained in 'data'"""
382 382 indent_string = b' ' * indent
383 383 headsbyphase = phases.binarydecode(data)
384 384 for phase in phases.allphases:
385 385 for head in headsbyphase[phase]:
386 386 ui.write(indent_string)
387 387 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
388 388
389 389
390 390 def _quasirepr(thing):
391 391 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
392 392 return b'{%s}' % (
393 393 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
394 394 )
395 395 return pycompat.bytestr(repr(thing))
396 396
397 397
398 398 def _debugbundle2(ui, gen, all=None, **opts):
399 399 """lists the contents of a bundle2"""
400 400 if not isinstance(gen, bundle2.unbundle20):
401 401 raise error.Abort(_(b'not a bundle2 file'))
402 402 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
403 403 parttypes = opts.get('part_type', [])
404 404 for part in gen.iterparts():
405 405 if parttypes and part.type not in parttypes:
406 406 continue
407 407 msg = b'%s -- %s (mandatory: %r)\n'
408 408 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
409 409 if part.type == b'changegroup':
410 410 version = part.params.get(b'version', b'01')
411 411 cg = changegroup.getunbundler(version, part, b'UN')
412 412 if not ui.quiet:
413 413 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
414 414 if part.type == b'obsmarkers':
415 415 if not ui.quiet:
416 416 _debugobsmarkers(ui, part, indent=4, **opts)
417 417 if part.type == b'phase-heads':
418 418 if not ui.quiet:
419 419 _debugphaseheads(ui, part, indent=4)
420 420
421 421
422 422 @command(
423 423 b'debugbundle',
424 424 [
425 425 (b'a', b'all', None, _(b'show all details')),
426 426 (b'', b'part-type', [], _(b'show only the named part type')),
427 427 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
428 428 ],
429 429 _(b'FILE'),
430 430 norepo=True,
431 431 )
432 432 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
433 433 """lists the contents of a bundle"""
434 434 with hg.openpath(ui, bundlepath) as f:
435 435 if spec:
436 436 spec = exchange.getbundlespec(ui, f)
437 437 ui.write(b'%s\n' % spec)
438 438 return
439 439
440 440 gen = exchange.readbundle(ui, f, bundlepath)
441 441 if isinstance(gen, bundle2.unbundle20):
442 442 return _debugbundle2(ui, gen, all=all, **opts)
443 443 _debugchangegroup(ui, gen, all=all, **opts)
444 444
445 445
446 446 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
447 447 def debugcapabilities(ui, path, **opts):
448 448 """lists the capabilities of a remote peer"""
449 449 opts = pycompat.byteskwargs(opts)
450 450 peer = hg.peer(ui, opts, path)
451 451 caps = peer.capabilities()
452 452 ui.writenoi18n(b'Main capabilities:\n')
453 453 for c in sorted(caps):
454 454 ui.write(b' %s\n' % c)
455 455 b2caps = bundle2.bundle2caps(peer)
456 456 if b2caps:
457 457 ui.writenoi18n(b'Bundle2 capabilities:\n')
458 458 for key, values in sorted(pycompat.iteritems(b2caps)):
459 459 ui.write(b' %s\n' % key)
460 460 for v in values:
461 461 ui.write(b' %s\n' % v)
462 462
463 463
464 464 @command(b'debugcheckstate', [], b'')
465 465 def debugcheckstate(ui, repo):
466 466 """validate the correctness of the current dirstate"""
467 467 parent1, parent2 = repo.dirstate.parents()
468 468 m1 = repo[parent1].manifest()
469 469 m2 = repo[parent2].manifest()
470 470 errors = 0
471 471 for f in repo.dirstate:
472 472 state = repo.dirstate[f]
473 473 if state in b"nr" and f not in m1:
474 474 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
475 475 errors += 1
476 476 if state in b"a" and f in m1:
477 477 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
478 478 errors += 1
479 479 if state in b"m" and f not in m1 and f not in m2:
480 480 ui.warn(
481 481 _(b"%s in state %s, but not in either manifest\n") % (f, state)
482 482 )
483 483 errors += 1
484 484 for f in m1:
485 485 state = repo.dirstate[f]
486 486 if state not in b"nrm":
487 487 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
488 488 errors += 1
489 489 if errors:
490 490 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
491 491 raise error.Abort(errstr)
492 492
493 493
494 494 @command(
495 495 b'debugcolor',
496 496 [(b'', b'style', None, _(b'show all configured styles'))],
497 497 b'hg debugcolor',
498 498 )
499 499 def debugcolor(ui, repo, **opts):
500 500 """show available colors, effects or styles"""
501 501 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
502 502 if opts.get('style'):
503 503 return _debugdisplaystyle(ui)
504 504 else:
505 505 return _debugdisplaycolor(ui)
506 506
507 507
508 508 def _debugdisplaycolor(ui):
509 509 ui = ui.copy()
510 510 ui._styles.clear()
511 511 for effect in color._activeeffects(ui).keys():
512 512 ui._styles[effect] = effect
513 513 if ui._terminfoparams:
514 514 for k, v in ui.configitems(b'color'):
515 515 if k.startswith(b'color.'):
516 516 ui._styles[k] = k[6:]
517 517 elif k.startswith(b'terminfo.'):
518 518 ui._styles[k] = k[9:]
519 519 ui.write(_(b'available colors:\n'))
520 520 # sort label with a '_' after the other to group '_background' entry.
521 521 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
522 522 for colorname, label in items:
523 523 ui.write(b'%s\n' % colorname, label=label)
524 524
525 525
526 526 def _debugdisplaystyle(ui):
527 527 ui.write(_(b'available style:\n'))
528 528 if not ui._styles:
529 529 return
530 530 width = max(len(s) for s in ui._styles)
531 531 for label, effects in sorted(ui._styles.items()):
532 532 ui.write(b'%s' % label, label=label)
533 533 if effects:
534 534 # 50
535 535 ui.write(b': ')
536 536 ui.write(b' ' * (max(0, width - len(label))))
537 537 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
538 538 ui.write(b'\n')
539 539
540 540
541 541 @command(b'debugcreatestreamclonebundle', [], b'FILE')
542 542 def debugcreatestreamclonebundle(ui, repo, fname):
543 543 """create a stream clone bundle file
544 544
545 545 Stream bundles are special bundles that are essentially archives of
546 546 revlog files. They are commonly used for cloning very quickly.
547 547 """
548 548 # TODO we may want to turn this into an abort when this functionality
549 549 # is moved into `hg bundle`.
550 550 if phases.hassecret(repo):
551 551 ui.warn(
552 552 _(
553 553 b'(warning: stream clone bundle will contain secret '
554 554 b'revisions)\n'
555 555 )
556 556 )
557 557
558 558 requirements, gen = streamclone.generatebundlev1(repo)
559 559 changegroup.writechunks(ui, gen, fname)
560 560
561 561 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
562 562
563 563
564 564 @command(
565 565 b'debugdag',
566 566 [
567 567 (b't', b'tags', None, _(b'use tags as labels')),
568 568 (b'b', b'branches', None, _(b'annotate with branch names')),
569 569 (b'', b'dots', None, _(b'use dots for runs')),
570 570 (b's', b'spaces', None, _(b'separate elements by spaces')),
571 571 ],
572 572 _(b'[OPTION]... [FILE [REV]...]'),
573 573 optionalrepo=True,
574 574 )
575 575 def debugdag(ui, repo, file_=None, *revs, **opts):
576 576 """format the changelog or an index DAG as a concise textual description
577 577
578 578 If you pass a revlog index, the revlog's DAG is emitted. If you list
579 579 revision numbers, they get labeled in the output as rN.
580 580
581 581 Otherwise, the changelog DAG of the current repo is emitted.
582 582 """
583 583 spaces = opts.get('spaces')
584 584 dots = opts.get('dots')
585 585 if file_:
586 586 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
587 587 revs = {int(r) for r in revs}
588 588
589 589 def events():
590 590 for r in rlog:
591 591 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
592 592 if r in revs:
593 593 yield b'l', (r, b"r%i" % r)
594 594
595 595 elif repo:
596 596 cl = repo.changelog
597 597 tags = opts.get('tags')
598 598 branches = opts.get('branches')
599 599 if tags:
600 600 labels = {}
601 601 for l, n in repo.tags().items():
602 602 labels.setdefault(cl.rev(n), []).append(l)
603 603
604 604 def events():
605 605 b = b"default"
606 606 for r in cl:
607 607 if branches:
608 608 newb = cl.read(cl.node(r))[5][b'branch']
609 609 if newb != b:
610 610 yield b'a', newb
611 611 b = newb
612 612 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
613 613 if tags:
614 614 ls = labels.get(r)
615 615 if ls:
616 616 for l in ls:
617 617 yield b'l', (r, l)
618 618
619 619 else:
620 620 raise error.Abort(_(b'need repo for changelog dag'))
621 621
622 622 for line in dagparser.dagtextlines(
623 623 events(),
624 624 addspaces=spaces,
625 625 wraplabels=True,
626 626 wrapannotations=True,
627 627 wrapnonlinear=dots,
628 628 usedots=dots,
629 629 maxlinewidth=70,
630 630 ):
631 631 ui.write(line)
632 632 ui.write(b"\n")
633 633
634 634
635 635 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
636 636 def debugdata(ui, repo, file_, rev=None, **opts):
637 637 """dump the contents of a data file revision"""
638 638 opts = pycompat.byteskwargs(opts)
639 639 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
640 640 if rev is not None:
641 641 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
642 642 file_, rev = None, file_
643 643 elif rev is None:
644 644 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
645 645 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
646 646 try:
647 647 ui.write(r.rawdata(r.lookup(rev)))
648 648 except KeyError:
649 649 raise error.Abort(_(b'invalid revision identifier %s') % rev)
650 650
651 651
652 652 @command(
653 653 b'debugdate',
654 654 [(b'e', b'extended', None, _(b'try extended date formats'))],
655 655 _(b'[-e] DATE [RANGE]'),
656 656 norepo=True,
657 657 optionalrepo=True,
658 658 )
659 659 def debugdate(ui, date, range=None, **opts):
660 660 """parse and display a date"""
661 661 if opts["extended"]:
662 662 d = dateutil.parsedate(date, dateutil.extendeddateformats)
663 663 else:
664 664 d = dateutil.parsedate(date)
665 665 ui.writenoi18n(b"internal: %d %d\n" % d)
666 666 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
667 667 if range:
668 668 m = dateutil.matchdate(range)
669 669 ui.writenoi18n(b"match: %s\n" % m(d[0]))
670 670
671 671
672 672 @command(
673 673 b'debugdeltachain',
674 674 cmdutil.debugrevlogopts + cmdutil.formatteropts,
675 675 _(b'-c|-m|FILE'),
676 676 optionalrepo=True,
677 677 )
678 678 def debugdeltachain(ui, repo, file_=None, **opts):
679 679 """dump information about delta chains in a revlog
680 680
681 681 Output can be templatized. Available template keywords are:
682 682
683 683 :``rev``: revision number
684 684 :``chainid``: delta chain identifier (numbered by unique base)
685 685 :``chainlen``: delta chain length to this revision
686 686 :``prevrev``: previous revision in delta chain
687 687 :``deltatype``: role of delta / how it was computed
688 688 :``compsize``: compressed size of revision
689 689 :``uncompsize``: uncompressed size of revision
690 690 :``chainsize``: total size of compressed revisions in chain
691 691 :``chainratio``: total chain size divided by uncompressed revision size
692 692 (new delta chains typically start at ratio 2.00)
693 693 :``lindist``: linear distance from base revision in delta chain to end
694 694 of this revision
695 695 :``extradist``: total size of revisions not part of this delta chain from
696 696 base of delta chain to end of this revision; a measurement
697 697 of how much extra data we need to read/seek across to read
698 698 the delta chain for this revision
699 699 :``extraratio``: extradist divided by chainsize; another representation of
700 700 how much unrelated data is needed to load this delta chain
701 701
702 702 If the repository is configured to use the sparse read, additional keywords
703 703 are available:
704 704
705 705 :``readsize``: total size of data read from the disk for a revision
706 706 (sum of the sizes of all the blocks)
707 707 :``largestblock``: size of the largest block of data read from the disk
708 708 :``readdensity``: density of useful bytes in the data read from the disk
709 709 :``srchunks``: in how many data hunks the whole revision would be read
710 710
711 711 The sparse read can be enabled with experimental.sparse-read = True
712 712 """
713 713 opts = pycompat.byteskwargs(opts)
714 714 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
715 715 index = r.index
716 716 start = r.start
717 717 length = r.length
718 718 generaldelta = r.version & revlog.FLAG_GENERALDELTA
719 719 withsparseread = getattr(r, '_withsparseread', False)
720 720
721 721 def revinfo(rev):
722 722 e = index[rev]
723 723 compsize = e[1]
724 724 uncompsize = e[2]
725 725 chainsize = 0
726 726
727 727 if generaldelta:
728 728 if e[3] == e[5]:
729 729 deltatype = b'p1'
730 730 elif e[3] == e[6]:
731 731 deltatype = b'p2'
732 732 elif e[3] == rev - 1:
733 733 deltatype = b'prev'
734 734 elif e[3] == rev:
735 735 deltatype = b'base'
736 736 else:
737 737 deltatype = b'other'
738 738 else:
739 739 if e[3] == rev:
740 740 deltatype = b'base'
741 741 else:
742 742 deltatype = b'prev'
743 743
744 744 chain = r._deltachain(rev)[0]
745 745 for iterrev in chain:
746 746 e = index[iterrev]
747 747 chainsize += e[1]
748 748
749 749 return compsize, uncompsize, deltatype, chain, chainsize
750 750
751 751 fm = ui.formatter(b'debugdeltachain', opts)
752 752
753 753 fm.plain(
754 754 b' rev chain# chainlen prev delta '
755 755 b'size rawsize chainsize ratio lindist extradist '
756 756 b'extraratio'
757 757 )
758 758 if withsparseread:
759 759 fm.plain(b' readsize largestblk rddensity srchunks')
760 760 fm.plain(b'\n')
761 761
762 762 chainbases = {}
763 763 for rev in r:
764 764 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
765 765 chainbase = chain[0]
766 766 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
767 767 basestart = start(chainbase)
768 768 revstart = start(rev)
769 769 lineardist = revstart + comp - basestart
770 770 extradist = lineardist - chainsize
771 771 try:
772 772 prevrev = chain[-2]
773 773 except IndexError:
774 774 prevrev = -1
775 775
776 776 if uncomp != 0:
777 777 chainratio = float(chainsize) / float(uncomp)
778 778 else:
779 779 chainratio = chainsize
780 780
781 781 if chainsize != 0:
782 782 extraratio = float(extradist) / float(chainsize)
783 783 else:
784 784 extraratio = extradist
785 785
786 786 fm.startitem()
787 787 fm.write(
788 788 b'rev chainid chainlen prevrev deltatype compsize '
789 789 b'uncompsize chainsize chainratio lindist extradist '
790 790 b'extraratio',
791 791 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
792 792 rev,
793 793 chainid,
794 794 len(chain),
795 795 prevrev,
796 796 deltatype,
797 797 comp,
798 798 uncomp,
799 799 chainsize,
800 800 chainratio,
801 801 lineardist,
802 802 extradist,
803 803 extraratio,
804 804 rev=rev,
805 805 chainid=chainid,
806 806 chainlen=len(chain),
807 807 prevrev=prevrev,
808 808 deltatype=deltatype,
809 809 compsize=comp,
810 810 uncompsize=uncomp,
811 811 chainsize=chainsize,
812 812 chainratio=chainratio,
813 813 lindist=lineardist,
814 814 extradist=extradist,
815 815 extraratio=extraratio,
816 816 )
817 817 if withsparseread:
818 818 readsize = 0
819 819 largestblock = 0
820 820 srchunks = 0
821 821
822 822 for revschunk in deltautil.slicechunk(r, chain):
823 823 srchunks += 1
824 824 blkend = start(revschunk[-1]) + length(revschunk[-1])
825 825 blksize = blkend - start(revschunk[0])
826 826
827 827 readsize += blksize
828 828 if largestblock < blksize:
829 829 largestblock = blksize
830 830
831 831 if readsize:
832 832 readdensity = float(chainsize) / float(readsize)
833 833 else:
834 834 readdensity = 1
835 835
836 836 fm.write(
837 837 b'readsize largestblock readdensity srchunks',
838 838 b' %10d %10d %9.5f %8d',
839 839 readsize,
840 840 largestblock,
841 841 readdensity,
842 842 srchunks,
843 843 readsize=readsize,
844 844 largestblock=largestblock,
845 845 readdensity=readdensity,
846 846 srchunks=srchunks,
847 847 )
848 848
849 849 fm.plain(b'\n')
850 850
851 851 fm.end()
852 852
853 853
854 854 @command(
855 855 b'debugdirstate|debugstate',
856 856 [
857 857 (
858 858 b'',
859 859 b'nodates',
860 860 None,
861 861 _(b'do not display the saved mtime (DEPRECATED)'),
862 862 ),
863 863 (b'', b'dates', True, _(b'display the saved mtime')),
864 864 (b'', b'datesort', None, _(b'sort by saved mtime')),
865 865 ],
866 866 _(b'[OPTION]...'),
867 867 )
868 868 def debugstate(ui, repo, **opts):
869 869 """show the contents of the current dirstate"""
870 870
871 871 nodates = not opts['dates']
872 872 if opts.get('nodates') is not None:
873 873 nodates = True
874 874 datesort = opts.get('datesort')
875 875
876 876 if datesort:
877 877 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
878 878 else:
879 879 keyfunc = None # sort by filename
880 880 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
881 881 if ent[3] == -1:
882 882 timestr = b'unset '
883 883 elif nodates:
884 884 timestr = b'set '
885 885 else:
886 886 timestr = time.strftime(
887 887 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
888 888 )
889 889 timestr = encoding.strtolocal(timestr)
890 890 if ent[1] & 0o20000:
891 891 mode = b'lnk'
892 892 else:
893 893 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
894 894 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
895 895 for f in repo.dirstate.copies():
896 896 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
897 897
898 898
899 899 @command(
900 900 b'debugdiscovery',
901 901 [
902 902 (b'', b'old', None, _(b'use old-style discovery')),
903 903 (
904 904 b'',
905 905 b'nonheads',
906 906 None,
907 907 _(b'use old-style discovery with non-heads included'),
908 908 ),
909 909 (b'', b'rev', [], b'restrict discovery to this set of revs'),
910 910 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
911 911 ]
912 912 + cmdutil.remoteopts,
913 913 _(b'[--rev REV] [OTHER]'),
914 914 )
915 915 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
916 916 """runs the changeset discovery protocol in isolation"""
917 917 opts = pycompat.byteskwargs(opts)
918 918 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
919 919 remote = hg.peer(repo, opts, remoteurl)
920 920 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
921 921
922 922 # make sure tests are repeatable
923 923 random.seed(int(opts[b'seed']))
924 924
925 925 if opts.get(b'old'):
926 926
927 927 def doit(pushedrevs, remoteheads, remote=remote):
928 928 if not util.safehasattr(remote, b'branches'):
929 929 # enable in-client legacy support
930 930 remote = localrepo.locallegacypeer(remote.local())
931 931 common, _in, hds = treediscovery.findcommonincoming(
932 932 repo, remote, force=True
933 933 )
934 934 common = set(common)
935 935 if not opts.get(b'nonheads'):
936 936 ui.writenoi18n(
937 937 b"unpruned common: %s\n"
938 938 % b" ".join(sorted(short(n) for n in common))
939 939 )
940 940
941 941 clnode = repo.changelog.node
942 942 common = repo.revs(b'heads(::%ln)', common)
943 943 common = {clnode(r) for r in common}
944 944 return common, hds
945 945
946 946 else:
947 947
948 948 def doit(pushedrevs, remoteheads, remote=remote):
949 949 nodes = None
950 950 if pushedrevs:
951 951 revs = scmutil.revrange(repo, pushedrevs)
952 952 nodes = [repo[r].node() for r in revs]
953 953 common, any, hds = setdiscovery.findcommonheads(
954 954 ui, repo, remote, ancestorsof=nodes
955 955 )
956 956 return common, hds
957 957
958 958 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
959 959 localrevs = opts[b'rev']
960 960 with util.timedcm('debug-discovery') as t:
961 961 common, hds = doit(localrevs, remoterevs)
962 962
963 963 # compute all statistics
964 964 common = set(common)
965 965 rheads = set(hds)
966 966 lheads = set(repo.heads())
967 967
968 968 data = {}
969 969 data[b'elapsed'] = t.elapsed
970 970 data[b'nb-common'] = len(common)
971 971 data[b'nb-common-local'] = len(common & lheads)
972 972 data[b'nb-common-remote'] = len(common & rheads)
973 973 data[b'nb-common-both'] = len(common & rheads & lheads)
974 974 data[b'nb-local'] = len(lheads)
975 975 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
976 976 data[b'nb-remote'] = len(rheads)
977 977 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
978 978 data[b'nb-revs'] = len(repo.revs(b'all()'))
979 979 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
980 980 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
981 981
982 982 # display discovery summary
983 983 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
984 984 ui.writenoi18n(b"heads summary:\n")
985 985 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
986 986 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
987 987 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
988 988 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
989 989 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
990 990 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
991 991 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
992 992 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
993 993 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
994 994 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
995 995 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
996 996 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
997 997 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
998 998
999 999 if ui.verbose:
1000 1000 ui.writenoi18n(
1001 1001 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1002 1002 )
1003 1003
1004 1004
1005 1005 _chunksize = 4 << 10
1006 1006
1007 1007
1008 1008 @command(
1009 1009 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1010 1010 )
1011 1011 def debugdownload(ui, repo, url, output=None, **opts):
1012 1012 """download a resource using Mercurial logic and config
1013 1013 """
1014 1014 fh = urlmod.open(ui, url, output)
1015 1015
1016 1016 dest = ui
1017 1017 if output:
1018 1018 dest = open(output, b"wb", _chunksize)
1019 1019 try:
1020 1020 data = fh.read(_chunksize)
1021 1021 while data:
1022 1022 dest.write(data)
1023 1023 data = fh.read(_chunksize)
1024 1024 finally:
1025 1025 if output:
1026 1026 dest.close()
1027 1027
1028 1028
1029 1029 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1030 1030 def debugextensions(ui, repo, **opts):
1031 1031 '''show information about active extensions'''
1032 1032 opts = pycompat.byteskwargs(opts)
1033 1033 exts = extensions.extensions(ui)
1034 1034 hgver = util.version()
1035 1035 fm = ui.formatter(b'debugextensions', opts)
1036 1036 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1037 1037 isinternal = extensions.ismoduleinternal(extmod)
1038 1038 extsource = None
1039 1039
1040 1040 if util.safehasattr(extmod, '__file__'):
1041 1041 extsource = pycompat.fsencode(extmod.__file__)
1042 1042 elif getattr(sys, 'oxidized', False):
1043 1043 extsource = pycompat.sysexecutable
1044 1044 if isinternal:
1045 1045 exttestedwith = [] # never expose magic string to users
1046 1046 else:
1047 1047 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1048 1048 extbuglink = getattr(extmod, 'buglink', None)
1049 1049
1050 1050 fm.startitem()
1051 1051
1052 1052 if ui.quiet or ui.verbose:
1053 1053 fm.write(b'name', b'%s\n', extname)
1054 1054 else:
1055 1055 fm.write(b'name', b'%s', extname)
1056 1056 if isinternal or hgver in exttestedwith:
1057 1057 fm.plain(b'\n')
1058 1058 elif not exttestedwith:
1059 1059 fm.plain(_(b' (untested!)\n'))
1060 1060 else:
1061 1061 lasttestedversion = exttestedwith[-1]
1062 1062 fm.plain(b' (%s!)\n' % lasttestedversion)
1063 1063
1064 1064 fm.condwrite(
1065 1065 ui.verbose and extsource,
1066 1066 b'source',
1067 1067 _(b' location: %s\n'),
1068 1068 extsource or b"",
1069 1069 )
1070 1070
1071 1071 if ui.verbose:
1072 1072 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1073 1073 fm.data(bundled=isinternal)
1074 1074
1075 1075 fm.condwrite(
1076 1076 ui.verbose and exttestedwith,
1077 1077 b'testedwith',
1078 1078 _(b' tested with: %s\n'),
1079 1079 fm.formatlist(exttestedwith, name=b'ver'),
1080 1080 )
1081 1081
1082 1082 fm.condwrite(
1083 1083 ui.verbose and extbuglink,
1084 1084 b'buglink',
1085 1085 _(b' bug reporting: %s\n'),
1086 1086 extbuglink or b"",
1087 1087 )
1088 1088
1089 1089 fm.end()
1090 1090
1091 1091
1092 1092 @command(
1093 1093 b'debugfileset',
1094 1094 [
1095 1095 (
1096 1096 b'r',
1097 1097 b'rev',
1098 1098 b'',
1099 1099 _(b'apply the filespec on this revision'),
1100 1100 _(b'REV'),
1101 1101 ),
1102 1102 (
1103 1103 b'',
1104 1104 b'all-files',
1105 1105 False,
1106 1106 _(b'test files from all revisions and working directory'),
1107 1107 ),
1108 1108 (
1109 1109 b's',
1110 1110 b'show-matcher',
1111 1111 None,
1112 1112 _(b'print internal representation of matcher'),
1113 1113 ),
1114 1114 (
1115 1115 b'p',
1116 1116 b'show-stage',
1117 1117 [],
1118 1118 _(b'print parsed tree at the given stage'),
1119 1119 _(b'NAME'),
1120 1120 ),
1121 1121 ],
1122 1122 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1123 1123 )
1124 1124 def debugfileset(ui, repo, expr, **opts):
1125 1125 '''parse and apply a fileset specification'''
1126 1126 from . import fileset
1127 1127
1128 1128 fileset.symbols # force import of fileset so we have predicates to optimize
1129 1129 opts = pycompat.byteskwargs(opts)
1130 1130 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1131 1131
1132 1132 stages = [
1133 1133 (b'parsed', pycompat.identity),
1134 1134 (b'analyzed', filesetlang.analyze),
1135 1135 (b'optimized', filesetlang.optimize),
1136 1136 ]
1137 1137 stagenames = {n for n, f in stages}
1138 1138
1139 1139 showalways = set()
1140 1140 if ui.verbose and not opts[b'show_stage']:
1141 1141 # show parsed tree by --verbose (deprecated)
1142 1142 showalways.add(b'parsed')
1143 1143 if opts[b'show_stage'] == [b'all']:
1144 1144 showalways.update(stagenames)
1145 1145 else:
1146 1146 for n in opts[b'show_stage']:
1147 1147 if n not in stagenames:
1148 1148 raise error.Abort(_(b'invalid stage name: %s') % n)
1149 1149 showalways.update(opts[b'show_stage'])
1150 1150
1151 1151 tree = filesetlang.parse(expr)
1152 1152 for n, f in stages:
1153 1153 tree = f(tree)
1154 1154 if n in showalways:
1155 1155 if opts[b'show_stage'] or n != b'parsed':
1156 1156 ui.write(b"* %s:\n" % n)
1157 1157 ui.write(filesetlang.prettyformat(tree), b"\n")
1158 1158
1159 1159 files = set()
1160 1160 if opts[b'all_files']:
1161 1161 for r in repo:
1162 1162 c = repo[r]
1163 1163 files.update(c.files())
1164 1164 files.update(c.substate)
1165 1165 if opts[b'all_files'] or ctx.rev() is None:
1166 1166 wctx = repo[None]
1167 1167 files.update(
1168 1168 repo.dirstate.walk(
1169 1169 scmutil.matchall(repo),
1170 1170 subrepos=list(wctx.substate),
1171 1171 unknown=True,
1172 1172 ignored=True,
1173 1173 )
1174 1174 )
1175 1175 files.update(wctx.substate)
1176 1176 else:
1177 1177 files.update(ctx.files())
1178 1178 files.update(ctx.substate)
1179 1179
1180 1180 m = ctx.matchfileset(repo.getcwd(), expr)
1181 1181 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1182 1182 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1183 1183 for f in sorted(files):
1184 1184 if not m(f):
1185 1185 continue
1186 1186 ui.write(b"%s\n" % f)
1187 1187
1188 1188
1189 1189 @command(b'debugformat', [] + cmdutil.formatteropts)
1190 1190 def debugformat(ui, repo, **opts):
1191 1191 """display format information about the current repository
1192 1192
1193 1193 Use --verbose to get extra information about current config value and
1194 1194 Mercurial default."""
1195 1195 opts = pycompat.byteskwargs(opts)
1196 1196 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1197 1197 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1198 1198
1199 1199 def makeformatname(name):
1200 1200 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1201 1201
1202 1202 fm = ui.formatter(b'debugformat', opts)
1203 1203 if fm.isplain():
1204 1204
1205 1205 def formatvalue(value):
1206 1206 if util.safehasattr(value, b'startswith'):
1207 1207 return value
1208 1208 if value:
1209 1209 return b'yes'
1210 1210 else:
1211 1211 return b'no'
1212 1212
1213 1213 else:
1214 1214 formatvalue = pycompat.identity
1215 1215
1216 1216 fm.plain(b'format-variant')
1217 1217 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1218 1218 fm.plain(b' repo')
1219 1219 if ui.verbose:
1220 1220 fm.plain(b' config default')
1221 1221 fm.plain(b'\n')
1222 1222 for fv in upgrade.allformatvariant:
1223 1223 fm.startitem()
1224 1224 repovalue = fv.fromrepo(repo)
1225 1225 configvalue = fv.fromconfig(repo)
1226 1226
1227 1227 if repovalue != configvalue:
1228 1228 namelabel = b'formatvariant.name.mismatchconfig'
1229 1229 repolabel = b'formatvariant.repo.mismatchconfig'
1230 1230 elif repovalue != fv.default:
1231 1231 namelabel = b'formatvariant.name.mismatchdefault'
1232 1232 repolabel = b'formatvariant.repo.mismatchdefault'
1233 1233 else:
1234 1234 namelabel = b'formatvariant.name.uptodate'
1235 1235 repolabel = b'formatvariant.repo.uptodate'
1236 1236
1237 1237 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1238 1238 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1239 1239 if fv.default != configvalue:
1240 1240 configlabel = b'formatvariant.config.special'
1241 1241 else:
1242 1242 configlabel = b'formatvariant.config.default'
1243 1243 fm.condwrite(
1244 1244 ui.verbose,
1245 1245 b'config',
1246 1246 b' %6s',
1247 1247 formatvalue(configvalue),
1248 1248 label=configlabel,
1249 1249 )
1250 1250 fm.condwrite(
1251 1251 ui.verbose,
1252 1252 b'default',
1253 1253 b' %7s',
1254 1254 formatvalue(fv.default),
1255 1255 label=b'formatvariant.default',
1256 1256 )
1257 1257 fm.plain(b'\n')
1258 1258 fm.end()
1259 1259
1260 1260
1261 1261 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1262 1262 def debugfsinfo(ui, path=b"."):
1263 1263 """show information detected about current filesystem"""
1264 1264 ui.writenoi18n(b'path: %s\n' % path)
1265 1265 ui.writenoi18n(
1266 1266 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1267 1267 )
1268 1268 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1269 1269 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1270 1270 ui.writenoi18n(
1271 1271 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1272 1272 )
1273 1273 ui.writenoi18n(
1274 1274 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1275 1275 )
1276 1276 casesensitive = b'(unknown)'
1277 1277 try:
1278 1278 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1279 1279 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1280 1280 except OSError:
1281 1281 pass
1282 1282 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1283 1283
1284 1284
1285 1285 @command(
1286 1286 b'debuggetbundle',
1287 1287 [
1288 1288 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1289 1289 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1290 1290 (
1291 1291 b't',
1292 1292 b'type',
1293 1293 b'bzip2',
1294 1294 _(b'bundle compression type to use'),
1295 1295 _(b'TYPE'),
1296 1296 ),
1297 1297 ],
1298 1298 _(b'REPO FILE [-H|-C ID]...'),
1299 1299 norepo=True,
1300 1300 )
1301 1301 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1302 1302 """retrieves a bundle from a repo
1303 1303
1304 1304 Every ID must be a full-length hex node id string. Saves the bundle to the
1305 1305 given file.
1306 1306 """
1307 1307 opts = pycompat.byteskwargs(opts)
1308 1308 repo = hg.peer(ui, opts, repopath)
1309 1309 if not repo.capable(b'getbundle'):
1310 1310 raise error.Abort(b"getbundle() not supported by target repository")
1311 1311 args = {}
1312 1312 if common:
1313 1313 args['common'] = [bin(s) for s in common]
1314 1314 if head:
1315 1315 args['heads'] = [bin(s) for s in head]
1316 1316 # TODO: get desired bundlecaps from command line.
1317 1317 args['bundlecaps'] = None
1318 1318 bundle = repo.getbundle(b'debug', **args)
1319 1319
1320 1320 bundletype = opts.get(b'type', b'bzip2').lower()
1321 1321 btypes = {
1322 1322 b'none': b'HG10UN',
1323 1323 b'bzip2': b'HG10BZ',
1324 1324 b'gzip': b'HG10GZ',
1325 1325 b'bundle2': b'HG20',
1326 1326 }
1327 1327 bundletype = btypes.get(bundletype)
1328 1328 if bundletype not in bundle2.bundletypes:
1329 1329 raise error.Abort(_(b'unknown bundle type specified with --type'))
1330 1330 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1331 1331
1332 1332
1333 1333 @command(b'debugignore', [], b'[FILE]')
1334 1334 def debugignore(ui, repo, *files, **opts):
1335 1335 """display the combined ignore pattern and information about ignored files
1336 1336
1337 1337 With no argument display the combined ignore pattern.
1338 1338
1339 1339 Given space separated file names, shows if the given file is ignored and
1340 1340 if so, show the ignore rule (file and line number) that matched it.
1341 1341 """
1342 1342 ignore = repo.dirstate._ignore
1343 1343 if not files:
1344 1344 # Show all the patterns
1345 1345 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1346 1346 else:
1347 1347 m = scmutil.match(repo[None], pats=files)
1348 1348 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1349 1349 for f in m.files():
1350 1350 nf = util.normpath(f)
1351 1351 ignored = None
1352 1352 ignoredata = None
1353 1353 if nf != b'.':
1354 1354 if ignore(nf):
1355 1355 ignored = nf
1356 1356 ignoredata = repo.dirstate._ignorefileandline(nf)
1357 1357 else:
1358 1358 for p in pathutil.finddirs(nf):
1359 1359 if ignore(p):
1360 1360 ignored = p
1361 1361 ignoredata = repo.dirstate._ignorefileandline(p)
1362 1362 break
1363 1363 if ignored:
1364 1364 if ignored == nf:
1365 1365 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1366 1366 else:
1367 1367 ui.write(
1368 1368 _(
1369 1369 b"%s is ignored because of "
1370 1370 b"containing directory %s\n"
1371 1371 )
1372 1372 % (uipathfn(f), ignored)
1373 1373 )
1374 1374 ignorefile, lineno, line = ignoredata
1375 1375 ui.write(
1376 1376 _(b"(ignore rule in %s, line %d: '%s')\n")
1377 1377 % (ignorefile, lineno, line)
1378 1378 )
1379 1379 else:
1380 1380 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1381 1381
1382 1382
1383 1383 @command(
1384 1384 b'debugindex',
1385 1385 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1386 1386 _(b'-c|-m|FILE'),
1387 1387 )
1388 1388 def debugindex(ui, repo, file_=None, **opts):
1389 1389 """dump index data for a storage primitive"""
1390 1390 opts = pycompat.byteskwargs(opts)
1391 1391 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1392 1392
1393 1393 if ui.debugflag:
1394 1394 shortfn = hex
1395 1395 else:
1396 1396 shortfn = short
1397 1397
1398 1398 idlen = 12
1399 1399 for i in store:
1400 1400 idlen = len(shortfn(store.node(i)))
1401 1401 break
1402 1402
1403 1403 fm = ui.formatter(b'debugindex', opts)
1404 1404 fm.plain(
1405 1405 b' rev linkrev %s %s p2\n'
1406 1406 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1407 1407 )
1408 1408
1409 1409 for rev in store:
1410 1410 node = store.node(rev)
1411 1411 parents = store.parents(node)
1412 1412
1413 1413 fm.startitem()
1414 1414 fm.write(b'rev', b'%6d ', rev)
1415 1415 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1416 1416 fm.write(b'node', b'%s ', shortfn(node))
1417 1417 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1418 1418 fm.write(b'p2', b'%s', shortfn(parents[1]))
1419 1419 fm.plain(b'\n')
1420 1420
1421 1421 fm.end()
1422 1422
1423 1423
1424 1424 @command(
1425 1425 b'debugindexdot',
1426 1426 cmdutil.debugrevlogopts,
1427 1427 _(b'-c|-m|FILE'),
1428 1428 optionalrepo=True,
1429 1429 )
1430 1430 def debugindexdot(ui, repo, file_=None, **opts):
1431 1431 """dump an index DAG as a graphviz dot file"""
1432 1432 opts = pycompat.byteskwargs(opts)
1433 1433 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1434 1434 ui.writenoi18n(b"digraph G {\n")
1435 1435 for i in r:
1436 1436 node = r.node(i)
1437 1437 pp = r.parents(node)
1438 1438 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1439 1439 if pp[1] != nullid:
1440 1440 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1441 1441 ui.write(b"}\n")
1442 1442
1443 1443
1444 1444 @command(b'debugindexstats', [])
1445 1445 def debugindexstats(ui, repo):
1446 1446 """show stats related to the changelog index"""
1447 1447 repo.changelog.shortest(nullid, 1)
1448 1448 index = repo.changelog.index
1449 1449 if not util.safehasattr(index, b'stats'):
1450 1450 raise error.Abort(_(b'debugindexstats only works with native code'))
1451 1451 for k, v in sorted(index.stats().items()):
1452 1452 ui.write(b'%s: %d\n' % (k, v))
1453 1453
1454 1454
1455 1455 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1456 1456 def debuginstall(ui, **opts):
1457 1457 '''test Mercurial installation
1458 1458
1459 1459 Returns 0 on success.
1460 1460 '''
1461 1461 opts = pycompat.byteskwargs(opts)
1462 1462
1463 1463 problems = 0
1464 1464
1465 1465 fm = ui.formatter(b'debuginstall', opts)
1466 1466 fm.startitem()
1467 1467
1468 1468 # encoding
1469 1469 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1470 1470 err = None
1471 1471 try:
1472 1472 codecs.lookup(pycompat.sysstr(encoding.encoding))
1473 1473 except LookupError as inst:
1474 1474 err = stringutil.forcebytestr(inst)
1475 1475 problems += 1
1476 1476 fm.condwrite(
1477 1477 err,
1478 1478 b'encodingerror',
1479 1479 _(b" %s\n (check that your locale is properly set)\n"),
1480 1480 err,
1481 1481 )
1482 1482
1483 1483 # Python
1484 1484 pythonlib = None
1485 1485 if util.safehasattr(os, '__file__'):
1486 1486 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1487 1487 elif getattr(sys, 'oxidized', False):
1488 1488 pythonlib = pycompat.sysexecutable
1489 1489
1490 1490 fm.write(
1491 1491 b'pythonexe',
1492 1492 _(b"checking Python executable (%s)\n"),
1493 1493 pycompat.sysexecutable or _(b"unknown"),
1494 1494 )
1495 1495 fm.write(
1496 1496 b'pythonimplementation',
1497 1497 _(b"checking Python implementation (%s)\n"),
1498 1498 pycompat.sysbytes(platform.python_implementation()),
1499 1499 )
1500 1500 fm.write(
1501 1501 b'pythonver',
1502 1502 _(b"checking Python version (%s)\n"),
1503 1503 (b"%d.%d.%d" % sys.version_info[:3]),
1504 1504 )
1505 1505 fm.write(
1506 1506 b'pythonlib',
1507 1507 _(b"checking Python lib (%s)...\n"),
1508 1508 pythonlib or _(b"unknown"),
1509 1509 )
1510 1510
1511 1511 try:
1512 1512 from . import rustext
1513 1513
1514 1514 rustext.__doc__ # trigger lazy import
1515 1515 except ImportError:
1516 1516 rustext = None
1517 1517
1518 1518 security = set(sslutil.supportedprotocols)
1519 1519 if sslutil.hassni:
1520 1520 security.add(b'sni')
1521 1521
1522 1522 fm.write(
1523 1523 b'pythonsecurity',
1524 1524 _(b"checking Python security support (%s)\n"),
1525 1525 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1526 1526 )
1527 1527
1528 1528 # These are warnings, not errors. So don't increment problem count. This
1529 1529 # may change in the future.
1530 1530 if b'tls1.2' not in security:
1531 1531 fm.plain(
1532 1532 _(
1533 1533 b' TLS 1.2 not supported by Python install; '
1534 1534 b'network connections lack modern security\n'
1535 1535 )
1536 1536 )
1537 1537 if b'sni' not in security:
1538 1538 fm.plain(
1539 1539 _(
1540 1540 b' SNI not supported by Python install; may have '
1541 1541 b'connectivity issues with some servers\n'
1542 1542 )
1543 1543 )
1544 1544
1545 1545 fm.plain(
1546 1546 _(
1547 1547 b"checking Rust extensions (%s)\n"
1548 1548 % (b'missing' if rustext is None else b'installed')
1549 1549 ),
1550 1550 )
1551 1551
1552 1552 # TODO print CA cert info
1553 1553
1554 1554 # hg version
1555 1555 hgver = util.version()
1556 1556 fm.write(
1557 1557 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1558 1558 )
1559 1559 fm.write(
1560 1560 b'hgverextra',
1561 1561 _(b"checking Mercurial custom build (%s)\n"),
1562 1562 b'+'.join(hgver.split(b'+')[1:]),
1563 1563 )
1564 1564
1565 1565 # compiled modules
1566 1566 hgmodules = None
1567 1567 if util.safehasattr(sys.modules[__name__], '__file__'):
1568 1568 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1569 1569 elif getattr(sys, 'oxidized', False):
1570 1570 hgmodules = pycompat.sysexecutable
1571 1571
1572 1572 fm.write(
1573 1573 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1574 1574 )
1575 1575 fm.write(
1576 1576 b'hgmodules',
1577 1577 _(b"checking installed modules (%s)...\n"),
1578 1578 hgmodules or _(b"unknown"),
1579 1579 )
1580 1580
1581 1581 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1582 1582 rustext = rustandc # for now, that's the only case
1583 1583 cext = policy.policy in (b'c', b'allow') or rustandc
1584 1584 nopure = cext or rustext
1585 1585 if nopure:
1586 1586 err = None
1587 1587 try:
1588 1588 if cext:
1589 1589 from .cext import ( # pytype: disable=import-error
1590 1590 base85,
1591 1591 bdiff,
1592 1592 mpatch,
1593 1593 osutil,
1594 1594 )
1595 1595
1596 1596 # quiet pyflakes
1597 1597 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1598 1598 if rustext:
1599 1599 from .rustext import ( # pytype: disable=import-error
1600 1600 ancestor,
1601 1601 dirstate,
1602 1602 )
1603 1603
1604 1604 dir(ancestor), dir(dirstate) # quiet pyflakes
1605 1605 except Exception as inst:
1606 1606 err = stringutil.forcebytestr(inst)
1607 1607 problems += 1
1608 1608 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1609 1609
1610 1610 compengines = util.compengines._engines.values()
1611 1611 fm.write(
1612 1612 b'compengines',
1613 1613 _(b'checking registered compression engines (%s)\n'),
1614 1614 fm.formatlist(
1615 1615 sorted(e.name() for e in compengines),
1616 1616 name=b'compengine',
1617 1617 fmt=b'%s',
1618 1618 sep=b', ',
1619 1619 ),
1620 1620 )
1621 1621 fm.write(
1622 1622 b'compenginesavail',
1623 1623 _(b'checking available compression engines (%s)\n'),
1624 1624 fm.formatlist(
1625 1625 sorted(e.name() for e in compengines if e.available()),
1626 1626 name=b'compengine',
1627 1627 fmt=b'%s',
1628 1628 sep=b', ',
1629 1629 ),
1630 1630 )
1631 1631 wirecompengines = compression.compengines.supportedwireengines(
1632 1632 compression.SERVERROLE
1633 1633 )
1634 1634 fm.write(
1635 1635 b'compenginesserver',
1636 1636 _(
1637 1637 b'checking available compression engines '
1638 1638 b'for wire protocol (%s)\n'
1639 1639 ),
1640 1640 fm.formatlist(
1641 1641 [e.name() for e in wirecompengines if e.wireprotosupport()],
1642 1642 name=b'compengine',
1643 1643 fmt=b'%s',
1644 1644 sep=b', ',
1645 1645 ),
1646 1646 )
1647 1647 re2 = b'missing'
1648 1648 if util._re2:
1649 1649 re2 = b'available'
1650 1650 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1651 1651 fm.data(re2=bool(util._re2))
1652 1652
1653 rust_debug_mod = policy.importrust("debug")
1654 if rust_debug_mod is not None:
1655 re2_rust = b'installed' if rust_debug_mod.re2_installed else b'missing'
1656
1657 msg = b'checking "re2" regexp engine Rust bindings (%s)\n'
1658 fm.plain(_(msg % re2_rust))
1659
1653 1660 # templates
1654 1661 p = templater.templatepaths()
1655 1662 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1656 1663 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1657 1664 if p:
1658 1665 m = templater.templatepath(b"map-cmdline.default")
1659 1666 if m:
1660 1667 # template found, check if it is working
1661 1668 err = None
1662 1669 try:
1663 1670 templater.templater.frommapfile(m)
1664 1671 except Exception as inst:
1665 1672 err = stringutil.forcebytestr(inst)
1666 1673 p = None
1667 1674 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1668 1675 else:
1669 1676 p = None
1670 1677 fm.condwrite(
1671 1678 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1672 1679 )
1673 1680 fm.condwrite(
1674 1681 not m,
1675 1682 b'defaulttemplatenotfound',
1676 1683 _(b" template '%s' not found\n"),
1677 1684 b"default",
1678 1685 )
1679 1686 if not p:
1680 1687 problems += 1
1681 1688 fm.condwrite(
1682 1689 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1683 1690 )
1684 1691
1685 1692 # editor
1686 1693 editor = ui.geteditor()
1687 1694 editor = util.expandpath(editor)
1688 1695 editorbin = procutil.shellsplit(editor)[0]
1689 1696 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1690 1697 cmdpath = procutil.findexe(editorbin)
1691 1698 fm.condwrite(
1692 1699 not cmdpath and editor == b'vi',
1693 1700 b'vinotfound',
1694 1701 _(
1695 1702 b" No commit editor set and can't find %s in PATH\n"
1696 1703 b" (specify a commit editor in your configuration"
1697 1704 b" file)\n"
1698 1705 ),
1699 1706 not cmdpath and editor == b'vi' and editorbin,
1700 1707 )
1701 1708 fm.condwrite(
1702 1709 not cmdpath and editor != b'vi',
1703 1710 b'editornotfound',
1704 1711 _(
1705 1712 b" Can't find editor '%s' in PATH\n"
1706 1713 b" (specify a commit editor in your configuration"
1707 1714 b" file)\n"
1708 1715 ),
1709 1716 not cmdpath and editorbin,
1710 1717 )
1711 1718 if not cmdpath and editor != b'vi':
1712 1719 problems += 1
1713 1720
1714 1721 # check username
1715 1722 username = None
1716 1723 err = None
1717 1724 try:
1718 1725 username = ui.username()
1719 1726 except error.Abort as e:
1720 1727 err = stringutil.forcebytestr(e)
1721 1728 problems += 1
1722 1729
1723 1730 fm.condwrite(
1724 1731 username, b'username', _(b"checking username (%s)\n"), username
1725 1732 )
1726 1733 fm.condwrite(
1727 1734 err,
1728 1735 b'usernameerror',
1729 1736 _(
1730 1737 b"checking username...\n %s\n"
1731 1738 b" (specify a username in your configuration file)\n"
1732 1739 ),
1733 1740 err,
1734 1741 )
1735 1742
1736 1743 for name, mod in extensions.extensions():
1737 1744 handler = getattr(mod, 'debuginstall', None)
1738 1745 if handler is not None:
1739 1746 problems += handler(ui, fm)
1740 1747
1741 1748 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1742 1749 if not problems:
1743 1750 fm.data(problems=problems)
1744 1751 fm.condwrite(
1745 1752 problems,
1746 1753 b'problems',
1747 1754 _(b"%d problems detected, please check your install!\n"),
1748 1755 problems,
1749 1756 )
1750 1757 fm.end()
1751 1758
1752 1759 return problems
1753 1760
1754 1761
1755 1762 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1756 1763 def debugknown(ui, repopath, *ids, **opts):
1757 1764 """test whether node ids are known to a repo
1758 1765
1759 1766 Every ID must be a full-length hex node id string. Returns a list of 0s
1760 1767 and 1s indicating unknown/known.
1761 1768 """
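# uses the "known" wire protocol command; the reply is printed as a string
# of 0/1 flags in the same order as the requested IDs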
1762 1769 opts = pycompat.byteskwargs(opts)
1763 1770 repo = hg.peer(ui, opts, repopath)
1764 1771 if not repo.capable(b'known'):
1765 1772 raise error.Abort(b"known() not supported by target repository")
1766 1773 flags = repo.known([bin(s) for s in ids])
1767 1774 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1768 1775
1769 1776
1770 1777 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1771 1778 def debuglabelcomplete(ui, repo, *args):
1772 1779 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1773 1780 debugnamecomplete(ui, repo, *args)
1774 1781
1775 1782
1776 1783 @command(
1777 1784 b'debuglocks',
1778 1785 [
1779 1786 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1780 1787 (
1781 1788 b'W',
1782 1789 b'force-wlock',
1783 1790 None,
1784 1791 _(b'free the working state lock (DANGEROUS)'),
1785 1792 ),
1786 1793 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1787 1794 (
1788 1795 b'S',
1789 1796 b'set-wlock',
1790 1797 None,
1791 1798 _(b'set the working state lock until stopped'),
1792 1799 ),
1793 1800 ],
1794 1801 _(b'[OPTION]...'),
1795 1802 )
1796 1803 def debuglocks(ui, repo, **opts):
1797 1804 """show or modify state of locks
1798 1805
1799 1806 By default, this command will show which locks are held. This
1800 1807 includes the user and process holding the lock, the amount of time
1801 1808 the lock has been held, and the machine name where the process is
1802 1809 running if it's not local.
1803 1810
1804 1811 Locks protect the integrity of Mercurial's data, so should be
1805 1812 treated with care. System crashes or other interruptions may cause
1806 1813 locks to not be properly released, though Mercurial will usually
1807 1814 detect and remove such stale locks automatically.
1808 1815
1809 1816 However, detecting stale locks may not always be possible (for
1810 1817 instance, on a shared filesystem). Removing locks may also be
1811 1818 blocked by filesystem permissions.
1812 1819
1813 1820 Setting a lock will prevent other commands from changing the data.
1814 1821 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1815 1822 The set locks are removed when the command exits.
1816 1823
1817 1824 Returns 0 if no locks are held.
1818 1825
1819 1826 """
1820 1827
1821 1828 if opts.get('force_lock'):
1822 1829 repo.svfs.unlink(b'lock')
1823 1830 if opts.get('force_wlock'):
1824 1831 repo.vfs.unlink(b'wlock')
1825 1832 if opts.get('force_lock') or opts.get('force_wlock'):
1826 1833 return 0
1827 1834
1828 1835 locks = []
1829 1836 try:
1830 1837 if opts.get('set_wlock'):
1831 1838 try:
1832 1839 locks.append(repo.wlock(False))
1833 1840 except error.LockHeld:
1834 1841 raise error.Abort(_(b'wlock is already held'))
1835 1842 if opts.get('set_lock'):
1836 1843 try:
1837 1844 locks.append(repo.lock(False))
1838 1845 except error.LockHeld:
1839 1846 raise error.Abort(_(b'lock is already held'))
1840 1847 if len(locks):
1841 1848 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1842 1849 return 0
1843 1850 finally:
1844 1851 release(*locks)
1845 1852
1846 1853 now = time.time()
1847 1854 held = 0
1848 1855
1849 1856 def report(vfs, name, method):
1850 1857 # this causes stale locks to get reaped for more accurate reporting
1851 1858 try:
1852 1859 l = method(False)
1853 1860 except error.LockHeld:
1854 1861 l = None
1855 1862
1856 1863 if l:
1857 1864 l.release()
1858 1865 else:
1859 1866 try:
1860 1867 st = vfs.lstat(name)
1861 1868 age = now - st[stat.ST_MTIME]
1862 1869 user = util.username(st.st_uid)
1863 1870 locker = vfs.readlock(name)
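# lock files record the locker as "host:pid"; the host part is omitted from
# the report when the lock is held on this machine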
1864 1871 if b":" in locker:
1865 1872 host, pid = locker.split(b':')
1866 1873 if host == socket.gethostname():
1867 1874 locker = b'user %s, process %s' % (user or b'None', pid)
1868 1875 else:
1869 1876 locker = b'user %s, process %s, host %s' % (
1870 1877 user or b'None',
1871 1878 pid,
1872 1879 host,
1873 1880 )
1874 1881 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1875 1882 return 1
1876 1883 except OSError as e:
1877 1884 if e.errno != errno.ENOENT:
1878 1885 raise
1879 1886
1880 1887 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1881 1888 return 0
1882 1889
1883 1890 held += report(repo.svfs, b"lock", repo.lock)
1884 1891 held += report(repo.vfs, b"wlock", repo.wlock)
1885 1892
1886 1893 return held
1887 1894
1888 1895
1889 1896 @command(
1890 1897 b'debugmanifestfulltextcache',
1891 1898 [
1892 1899 (b'', b'clear', False, _(b'clear the cache')),
1893 1900 (
1894 1901 b'a',
1895 1902 b'add',
1896 1903 [],
1897 1904 _(b'add the given manifest nodes to the cache'),
1898 1905 _(b'NODE'),
1899 1906 ),
1900 1907 ],
1901 1908 b'',
1902 1909 )
1903 1910 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1904 1911 """show, clear or amend the contents of the manifest fulltext cache"""
1905 1912
1906 1913 def getcache():
1907 1914 r = repo.manifestlog.getstorage(b'')
1908 1915 try:
1909 1916 return r._fulltextcache
1910 1917 except AttributeError:
1911 1918 msg = _(
1912 1919 b"Current revlog implementation doesn't appear to have a "
1913 1920 b"manifest fulltext cache\n"
1914 1921 )
1915 1922 raise error.Abort(msg)
1916 1923
1917 1924 if opts.get('clear'):
1918 1925 with repo.wlock():
1919 1926 cache = getcache()
1920 1927 cache.clear(clear_persisted_data=True)
1921 1928 return
1922 1929
1923 1930 if add:
1924 1931 with repo.wlock():
1925 1932 m = repo.manifestlog
1926 1933 store = m.getstorage(b'')
1927 1934 for n in add:
1928 1935 try:
1929 1936 manifest = m[store.lookup(n)]
1930 1937 except error.LookupError as e:
1931 1938 raise error.Abort(e, hint=b"Check your manifest node id")
1932 1939 manifest.read() # stores revision in cache too
1933 1940 return
1934 1941
1935 1942 cache = getcache()
1936 1943 if not len(cache):
1937 1944 ui.write(_(b'cache empty\n'))
1938 1945 else:
1939 1946 ui.write(
1940 1947 _(
1941 1948 b'cache contains %d manifest entries, in order of most to '
1942 1949 b'least recent:\n'
1943 1950 )
1944 1951 % (len(cache),)
1945 1952 )
1946 1953 totalsize = 0
1947 1954 for nodeid in cache:
1948 1955 # use cache.peek() so the LRU order is not updated
1949 1956 data = cache.peek(nodeid)
1950 1957 size = len(data)
1951 1958 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1952 1959 ui.write(
1953 1960 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1954 1961 )
1955 1962 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1956 1963 ui.write(
1957 1964 _(b'total cache data size %s, on-disk %s\n')
1958 1965 % (util.bytecount(totalsize), util.bytecount(ondisk))
1959 1966 )
1960 1967
1961 1968
1962 1969 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
1963 1970 def debugmergestate(ui, repo, *args, **opts):
1964 1971 """print merge state
1965 1972
1966 1973 Use --verbose to print out information about whether v1 or v2 merge state
1967 1974 was chosen."""
1968 1975
1969 1976 if ui.verbose:
1970 1977 ms = mergemod.mergestate(repo)
1971 1978
1972 1979 # read both formats so we can report which merge state version will be used
1973 1980 v1records = ms._readrecordsv1()
1974 1981 v2records = ms._readrecordsv2()
1975 1982
1976 1983 if not v1records and not v2records:
1977 1984 pass
1978 1985 elif not v2records:
1979 1986 ui.writenoi18n(b'no version 2 merge state\n')
1980 1987 elif ms._v1v2match(v1records, v2records):
1981 1988 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
1982 1989 else:
1983 1990 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
1984 1991
1985 1992 opts = pycompat.byteskwargs(opts)
1986 1993 if not opts[b'template']:
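# default template: one line per merge-state commit (local/other), then one
# block per file with its resolution state, the relevant paths/nodes or
# rename details, and any extra records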
1987 1994 opts[b'template'] = (
1988 1995 b'{if(commits, "", "no merge state found\n")}'
1989 1996 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
1990 1997 b'{files % "file: {path} (state \\"{state}\\")\n'
1991 1998 b'{if(local_path, "'
1992 1999 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
1993 2000 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
1994 2001 b' other path: {other_path} (node {other_node})\n'
1995 2002 b'")}'
1996 2003 b'{if(rename_side, "'
1997 2004 b' rename side: {rename_side}\n'
1998 2005 b' renamed path: {renamed_path}\n'
1999 2006 b'")}'
2000 2007 b'{extras % " extra: {key} = {value}\n"}'
2001 2008 b'"}'
2002 2009 )
2003 2010
2004 2011 ms = mergemod.mergestate.read(repo)
2005 2012
2006 2013 fm = ui.formatter(b'debugmergestate', opts)
2007 2014 fm.startitem()
2008 2015
2009 2016 fm_commits = fm.nested(b'commits')
2010 2017 if ms.active():
2011 2018 for name, node, label_index in (
2012 2019 (b'local', ms.local, 0),
2013 2020 (b'other', ms.other, 1),
2014 2021 ):
2015 2022 fm_commits.startitem()
2016 2023 fm_commits.data(name=name)
2017 2024 fm_commits.data(node=hex(node))
2018 2025 if ms._labels and len(ms._labels) > label_index:
2019 2026 fm_commits.data(label=ms._labels[label_index])
2020 2027 fm_commits.end()
2021 2028
2022 2029 fm_files = fm.nested(b'files')
2023 2030 if ms.active():
2024 2031 for f in ms:
2025 2032 fm_files.startitem()
2026 2033 fm_files.data(path=f)
2027 2034 state = ms._state[f]
2028 2035 fm_files.data(state=state[0])
2029 2036 if state[0] in (
2030 2037 mergemod.MERGE_RECORD_UNRESOLVED,
2031 2038 mergemod.MERGE_RECORD_RESOLVED,
2032 2039 ):
2033 2040 fm_files.data(local_key=state[1])
2034 2041 fm_files.data(local_path=state[2])
2035 2042 fm_files.data(ancestor_path=state[3])
2036 2043 fm_files.data(ancestor_node=state[4])
2037 2044 fm_files.data(other_path=state[5])
2038 2045 fm_files.data(other_node=state[6])
2039 2046 fm_files.data(local_flags=state[7])
2040 2047 elif state[0] in (
2041 2048 mergemod.MERGE_RECORD_UNRESOLVED_PATH,
2042 2049 mergemod.MERGE_RECORD_RESOLVED_PATH,
2043 2050 ):
2044 2051 fm_files.data(renamed_path=state[1])
2045 2052 fm_files.data(rename_side=state[2])
2046 2053 fm_extras = fm_files.nested(b'extras')
2047 2054 for k, v in ms.extras(f).items():
2048 2055 fm_extras.startitem()
2049 2056 fm_extras.data(key=k)
2050 2057 fm_extras.data(value=v)
2051 2058 fm_extras.end()
2052 2059
2053 2060 fm_files.end()
2054 2061
2055 2062 fm.end()
2056 2063
2057 2064
2058 2065 @command(b'debugnamecomplete', [], _(b'NAME...'))
2059 2066 def debugnamecomplete(ui, repo, *args):
2060 2067 '''complete "names" - tags, open branch names, bookmark names'''
2061 2068
2062 2069 names = set()
2063 2070 # since we previously only listed open branches, we will handle that
2064 2071 # specially (after this for loop)
2065 2072 for name, ns in pycompat.iteritems(repo.names):
2066 2073 if name != b'branches':
2067 2074 names.update(ns.listnames(repo))
2068 2075 names.update(
2069 2076 tag
2070 2077 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2071 2078 if not closed
2072 2079 )
2073 2080 completions = set()
2074 2081 if not args:
2075 2082 args = [b'']
2076 2083 for a in args:
2077 2084 completions.update(n for n in names if n.startswith(a))
2078 2085 ui.write(b'\n'.join(sorted(completions)))
2079 2086 ui.write(b'\n')
2080 2087
2081 2088
2082 2089 @command(
2083 2090 b'debugnodemap',
2084 2091 [
2085 2092 (
2086 2093 b'',
2087 2094 b'dump-new',
2088 2095 False,
2089 2096 _(b'write a (new) persistent binary nodemap to stdout'),
2090 2097 ),
2091 2098 (b'', b'dump-disk', False, _(b'dump the on-disk data to stdout')),
2092 2099 (
2093 2100 b'',
2094 2101 b'check',
2095 2102 False,
2096 2103 _(b'check that the on-disk data is correct'),
2097 2104 ),
2098 2105 (
2099 2106 b'',
2100 2107 b'metadata',
2101 2108 False,
2102 2109 _(b'display the on-disk metadata for the nodemap'),
2103 2110 ),
2104 2111 ],
2105 2112 )
2106 2113 def debugnodemap(ui, repo, **opts):
2107 2114 """write and inspect on disk nodemap
2108 2115 """
2109 2116 if opts['dump_new']:
2110 2117 unfi = repo.unfiltered()
2111 2118 cl = unfi.changelog
2112 2119 if util.safehasattr(cl.index, "nodemap_data_all"):
2113 2120 data = cl.index.nodemap_data_all()
2114 2121 else:
2115 2122 data = nodemap.persistent_data(cl.index)
2116 2123 ui.write(data)
2117 2124 elif opts['dump_disk']:
2118 2125 unfi = repo.unfiltered()
2119 2126 cl = unfi.changelog
2120 2127 nm_data = nodemap.persisted_data(cl)
2121 2128 if nm_data is not None:
2122 2129 docket, data = nm_data
2123 2130 ui.write(data[:])
2124 2131 elif opts['check']:
2125 2132 unfi = repo.unfiltered()
2126 2133 cl = unfi.changelog
2127 2134 nm_data = nodemap.persisted_data(cl)
2128 2135 if nm_data is not None:
2129 2136 docket, data = nm_data
2130 2137 return nodemap.check_data(ui, cl.index, data)
2131 2138 elif opts['metadata']:
2132 2139 unfi = repo.unfiltered()
2133 2140 cl = unfi.changelog
2134 2141 nm_data = nodemap.persisted_data(cl)
2135 2142 if nm_data is not None:
2136 2143 docket, data = nm_data
2137 2144 ui.write((b"uid: %s\n") % docket.uid)
2138 2145 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2139 2146 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2140 2147 ui.write((b"data-length: %d\n") % docket.data_length)
2141 2148 ui.write((b"data-unused: %d\n") % docket.data_unused)
2142 2149
2143 2150
2144 2151 @command(
2145 2152 b'debugobsolete',
2146 2153 [
2147 2154 (b'', b'flags', 0, _(b'markers flag')),
2148 2155 (
2149 2156 b'',
2150 2157 b'record-parents',
2151 2158 False,
2152 2159 _(b'record parent information for the precursor'),
2153 2160 ),
2154 2161 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2155 2162 (
2156 2163 b'',
2157 2164 b'exclusive',
2158 2165 False,
2159 2166 _(b'restrict display to markers only relevant to REV'),
2160 2167 ),
2161 2168 (b'', b'index', False, _(b'display index of the marker')),
2162 2169 (b'', b'delete', [], _(b'delete markers specified by indices')),
2163 2170 ]
2164 2171 + cmdutil.commitopts2
2165 2172 + cmdutil.formatteropts,
2166 2173 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2167 2174 )
2168 2175 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2169 2176 """create arbitrary obsolete marker
2170 2177
2171 2178 With no arguments, displays the list of obsolescence markers."""
2172 2179
2173 2180 opts = pycompat.byteskwargs(opts)
2174 2181
2175 2182 def parsenodeid(s):
2176 2183 try:
2177 2184 # We do not use revsingle/revrange functions here to accept
2178 2185 # arbitrary node identifiers, possibly not present in the
2179 2186 # local repository.
2180 2187 n = bin(s)
2181 2188 if len(n) != len(nullid):
2182 2189 raise TypeError()
2183 2190 return n
2184 2191 except TypeError:
2185 2192 raise error.Abort(
2186 2193 b'changeset references must be full hexadecimal '
2187 2194 b'node identifiers'
2188 2195 )
2189 2196
2190 2197 if opts.get(b'delete'):
2191 2198 indices = []
2192 2199 for v in opts.get(b'delete'):
2193 2200 try:
2194 2201 indices.append(int(v))
2195 2202 except ValueError:
2196 2203 raise error.Abort(
2197 2204 _(b'invalid index value: %r') % v,
2198 2205 hint=_(b'use integers for indices'),
2199 2206 )
2200 2207
2201 2208 if repo.currenttransaction():
2202 2209 raise error.Abort(
2203 2210 _(b'cannot delete obsmarkers in the middle of a transaction.')
2204 2211 )
2205 2212
2206 2213 with repo.lock():
2207 2214 n = repair.deleteobsmarkers(repo.obsstore, indices)
2208 2215 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2209 2216
2210 2217 return
2211 2218
2212 2219 if precursor is not None:
2213 2220 if opts[b'rev']:
2214 2221 raise error.Abort(b'cannot select revision when creating marker')
2215 2222 metadata = {}
2216 2223 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2217 2224 succs = tuple(parsenodeid(succ) for succ in successors)
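# creating a marker needs the store lock and happens inside a transaction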
2218 2225 l = repo.lock()
2219 2226 try:
2220 2227 tr = repo.transaction(b'debugobsolete')
2221 2228 try:
2222 2229 date = opts.get(b'date')
2223 2230 if date:
2224 2231 date = dateutil.parsedate(date)
2225 2232 else:
2226 2233 date = None
2227 2234 prec = parsenodeid(precursor)
2228 2235 parents = None
2229 2236 if opts[b'record_parents']:
2230 2237 if prec not in repo.unfiltered():
2231 2238 raise error.Abort(
2232 2239 b'cannot use --record-parents on '
2233 2240 b'unknown changesets'
2234 2241 )
2235 2242 parents = repo.unfiltered()[prec].parents()
2236 2243 parents = tuple(p.node() for p in parents)
2237 2244 repo.obsstore.create(
2238 2245 tr,
2239 2246 prec,
2240 2247 succs,
2241 2248 opts[b'flags'],
2242 2249 parents=parents,
2243 2250 date=date,
2244 2251 metadata=metadata,
2245 2252 ui=ui,
2246 2253 )
2247 2254 tr.close()
2248 2255 except ValueError as exc:
2249 2256 raise error.Abort(
2250 2257 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2251 2258 )
2252 2259 finally:
2253 2260 tr.release()
2254 2261 finally:
2255 2262 l.release()
2256 2263 else:
2257 2264 if opts[b'rev']:
2258 2265 revs = scmutil.revrange(repo, opts[b'rev'])
2259 2266 nodes = [repo[r].node() for r in revs]
2260 2267 markers = list(
2261 2268 obsutil.getmarkers(
2262 2269 repo, nodes=nodes, exclusive=opts[b'exclusive']
2263 2270 )
2264 2271 )
2265 2272 markers.sort(key=lambda x: x._data)
2266 2273 else:
2267 2274 markers = obsutil.getmarkers(repo)
2268 2275
2269 2276 markerstoiter = markers
2270 2277 isrelevant = lambda m: True
2271 2278 if opts.get(b'rev') and opts.get(b'index'):
2272 2279 markerstoiter = obsutil.getmarkers(repo)
2273 2280 markerset = set(markers)
2274 2281 isrelevant = lambda m: m in markerset
2275 2282
2276 2283 fm = ui.formatter(b'debugobsolete', opts)
2277 2284 for i, m in enumerate(markerstoiter):
2278 2285 if not isrelevant(m):
2279 2286 # marker can be irrelevant when we're iterating over a set
2280 2287 # of markers (markerstoiter) which is bigger than the set
2281 2288 # of markers we want to display (markers)
2282 2289 # this can happen if both --index and --rev options are
2283 2290 # provided and thus we need to iterate over all of the markers
2284 2291 # to get the correct indices, but only display the ones that
2285 2292 # are relevant to --rev value
2286 2293 continue
2287 2294 fm.startitem()
2288 2295 ind = i if opts.get(b'index') else None
2289 2296 cmdutil.showmarker(fm, m, index=ind)
2290 2297 fm.end()
2291 2298
2292 2299
2293 2300 @command(
2294 2301 b'debugp1copies',
2295 2302 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2296 2303 _(b'[-r REV]'),
2297 2304 )
2298 2305 def debugp1copies(ui, repo, **opts):
2299 2306 """dump copy information compared to p1"""
2300 2307
2301 2308 opts = pycompat.byteskwargs(opts)
2302 2309 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2303 2310 for dst, src in ctx.p1copies().items():
2304 2311 ui.write(b'%s -> %s\n' % (src, dst))
2305 2312
2306 2313
2307 2314 @command(
2308 2315 b'debugp2copies',
2309 2316 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2310 2317 _(b'[-r REV]'),
2311 2318 )
2312 2319 def debugp2copies(ui, repo, **opts):
2313 2320 """dump copy information compared to p2"""
2314 2321
2315 2322 opts = pycompat.byteskwargs(opts)
2316 2323 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2317 2324 for dst, src in ctx.p2copies().items():
2318 2325 ui.write(b'%s -> %s\n' % (src, dst))
2319 2326
2320 2327
2321 2328 @command(
2322 2329 b'debugpathcomplete',
2323 2330 [
2324 2331 (b'f', b'full', None, _(b'complete an entire path')),
2325 2332 (b'n', b'normal', None, _(b'show only normal files')),
2326 2333 (b'a', b'added', None, _(b'show only added files')),
2327 2334 (b'r', b'removed', None, _(b'show only removed files')),
2328 2335 ],
2329 2336 _(b'FILESPEC...'),
2330 2337 )
2331 2338 def debugpathcomplete(ui, repo, *specs, **opts):
2332 2339 '''complete part or all of a tracked path
2333 2340
2334 2341 This command supports shells that offer path name completion. It
2335 2342 currently completes only files already known to the dirstate.
2336 2343
2337 2344 Completion extends only to the next path segment unless
2338 2345 --full is specified, in which case entire paths are used.'''
2339 2346
2340 2347 def complete(path, acceptable):
2341 2348 dirstate = repo.dirstate
2342 2349 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2343 2350 rootdir = repo.root + pycompat.ossep
2344 2351 if spec != repo.root and not spec.startswith(rootdir):
2345 2352 return [], []
2346 2353 if os.path.isdir(spec):
2347 2354 spec += b'/'
2348 2355 spec = spec[len(rootdir) :]
2349 2356 fixpaths = pycompat.ossep != b'/'
2350 2357 if fixpaths:
2351 2358 spec = spec.replace(pycompat.ossep, b'/')
2352 2359 speclen = len(spec)
2353 2360 fullpaths = opts['full']
2354 2361 files, dirs = set(), set()
2355 2362 adddir, addfile = dirs.add, files.add
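# single pass over the dirstate: with --full, record whole matching paths;
# otherwise stop at the next path separator after the matched prefix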
2356 2363 for f, st in pycompat.iteritems(dirstate):
2357 2364 if f.startswith(spec) and st[0] in acceptable:
2358 2365 if fixpaths:
2359 2366 f = f.replace(b'/', pycompat.ossep)
2360 2367 if fullpaths:
2361 2368 addfile(f)
2362 2369 continue
2363 2370 s = f.find(pycompat.ossep, speclen)
2364 2371 if s >= 0:
2365 2372 adddir(f[:s])
2366 2373 else:
2367 2374 addfile(f)
2368 2375 return files, dirs
2369 2376
2370 2377 acceptable = b''
2371 2378 if opts['normal']:
2372 2379 acceptable += b'nm'
2373 2380 if opts['added']:
2374 2381 acceptable += b'a'
2375 2382 if opts['removed']:
2376 2383 acceptable += b'r'
2377 2384 cwd = repo.getcwd()
2378 2385 if not specs:
2379 2386 specs = [b'.']
2380 2387
2381 2388 files, dirs = set(), set()
2382 2389 for spec in specs:
2383 2390 f, d = complete(spec, acceptable or b'nmar')
2384 2391 files.update(f)
2385 2392 dirs.update(d)
2386 2393 files.update(dirs)
2387 2394 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2388 2395 ui.write(b'\n')
2389 2396
2390 2397
2391 2398 @command(
2392 2399 b'debugpathcopies',
2393 2400 cmdutil.walkopts,
2394 2401 b'hg debugpathcopies REV1 REV2 [FILE]',
2395 2402 inferrepo=True,
2396 2403 )
2397 2404 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2398 2405 """show copies between two revisions"""
2399 2406 ctx1 = scmutil.revsingle(repo, rev1)
2400 2407 ctx2 = scmutil.revsingle(repo, rev2)
2401 2408 m = scmutil.match(ctx1, pats, opts)
2402 2409 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2403 2410 ui.write(b'%s -> %s\n' % (src, dst))
2404 2411
2405 2412
2406 2413 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2407 2414 def debugpeer(ui, path):
2408 2415 """establish a connection to a peer repository"""
2409 2416 # Always enable peer request logging. Requires --debug to display
2410 2417 # though.
2411 2418 overrides = {
2412 2419 (b'devel', b'debug.peer-request'): True,
2413 2420 }
2414 2421
2415 2422 with ui.configoverride(overrides):
2416 2423 peer = hg.peer(ui, {}, path)
2417 2424
2418 2425 local = peer.local() is not None
2419 2426 canpush = peer.canpush()
2420 2427
2421 2428 ui.write(_(b'url: %s\n') % peer.url())
2422 2429 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2423 2430 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2424 2431
2425 2432
2426 2433 @command(
2427 2434 b'debugpickmergetool',
2428 2435 [
2429 2436 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2430 2437 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2431 2438 ]
2432 2439 + cmdutil.walkopts
2433 2440 + cmdutil.mergetoolopts,
2434 2441 _(b'[PATTERN]...'),
2435 2442 inferrepo=True,
2436 2443 )
2437 2444 def debugpickmergetool(ui, repo, *pats, **opts):
2438 2445 """examine which merge tool is chosen for the specified file
2439 2446 
2440 2447 As described in :hg:`help merge-tools`, Mercurial examines the
2441 2448 configurations below in this order to decide which merge tool is
2442 2449 chosen for the specified file.
2443 2450
2444 2451 1. ``--tool`` option
2445 2452 2. ``HGMERGE`` environment variable
2446 2453 3. configurations in ``merge-patterns`` section
2447 2454 4. configuration of ``ui.merge``
2448 2455 5. configurations in ``merge-tools`` section
2449 2456 6. ``hgmerge`` tool (for historical reasons only)
2450 2457 7. default tool for fallback (``:merge`` or ``:prompt``)
2451 2458
2452 2459 This command writes out the examination result in the style below::
2453 2460
2454 2461 FILE = MERGETOOL
2455 2462
2456 2463 By default, all files known in the first parent context of the
2457 2464 working directory are examined. Use file patterns and/or -I/-X
2458 2465 options to limit target files. -r/--rev is also useful to examine
2459 2466 files in another context without actually updating to it.
2460 2467
2461 2468 With --debug, this command also shows the warning messages emitted
2462 2469 while matching against ``merge-patterns`` and so on. It is recommended
2463 2470 to use this option with explicit file patterns and/or -I/-X options,
2464 2471 because it increases the amount of output per file according to the
2465 2472 configurations in hgrc.
2466 2473
2467 2474 With -v/--verbose, this command first shows the configurations
2468 2475 below (only the ones that are actually set).
2469 2476
2470 2477 - ``--tool`` option
2471 2478 - ``HGMERGE`` environment variable
2472 2479 - configuration of ``ui.merge``
2473 2480
2474 2481 If a merge tool is chosen before matching against
2475 2482 ``merge-patterns``, this command can't show any helpful
2476 2483 information, even with --debug. In that case, the information above
2477 2484 is useful for understanding why that merge tool was chosen.
2478 2485 """
2479 2486 opts = pycompat.byteskwargs(opts)
2480 2487 overrides = {}
2481 2488 if opts[b'tool']:
2482 2489 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2483 2490 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2484 2491
2485 2492 with ui.configoverride(overrides, b'debugmergepatterns'):
2486 2493 hgmerge = encoding.environ.get(b"HGMERGE")
2487 2494 if hgmerge is not None:
2488 2495 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2489 2496 uimerge = ui.config(b"ui", b"merge")
2490 2497 if uimerge:
2491 2498 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2492 2499
2493 2500 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2494 2501 m = scmutil.match(ctx, pats, opts)
2495 2502 changedelete = opts[b'changedelete']
2496 2503 for path in ctx.walk(m):
2497 2504 fctx = ctx[path]
2498 2505 try:
2499 2506 if not ui.debugflag:
2500 2507 ui.pushbuffer(error=True)
2501 2508 tool, toolpath = filemerge._picktool(
2502 2509 repo,
2503 2510 ui,
2504 2511 path,
2505 2512 fctx.isbinary(),
2506 2513 b'l' in fctx.flags(),
2507 2514 changedelete,
2508 2515 )
2509 2516 finally:
2510 2517 if not ui.debugflag:
2511 2518 ui.popbuffer()
2512 2519 ui.write(b'%s = %s\n' % (path, tool))
2513 2520
2514 2521
2515 2522 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2516 2523 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2517 2524 '''access the pushkey key/value protocol
2518 2525
2519 2526 With two args, list the keys in the given namespace.
2520 2527
2521 2528 With five args, set a key to new if it currently is set to old.
2522 2529 Reports success or failure.
2523 2530 '''
2524 2531
2525 2532 target = hg.peer(ui, {}, repopath)
2526 2533 if keyinfo:
2527 2534 key, old, new = keyinfo
2528 2535 with target.commandexecutor() as e:
2529 2536 r = e.callcommand(
2530 2537 b'pushkey',
2531 2538 {
2532 2539 b'namespace': namespace,
2533 2540 b'key': key,
2534 2541 b'old': old,
2535 2542 b'new': new,
2536 2543 },
2537 2544 ).result()
2538 2545
2539 2546 ui.status(pycompat.bytestr(r) + b'\n')
2540 2547 return not r
2541 2548 else:
2542 2549 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2543 2550 ui.write(
2544 2551 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2545 2552 )
2546 2553
2547 2554
2548 2555 @command(b'debugpvec', [], _(b'A B'))
2549 2556 def debugpvec(ui, repo, a, b=None):
2550 2557 ca = scmutil.revsingle(repo, a)
2551 2558 cb = scmutil.revsingle(repo, b)
2552 2559 pa = pvec.ctxpvec(ca)
2553 2560 pb = pvec.ctxpvec(cb)
2554 2561 if pa == pb:
2555 2562 rel = b"="
2556 2563 elif pa > pb:
2557 2564 rel = b">"
2558 2565 elif pa < pb:
2559 2566 rel = b"<"
2560 2567 elif pa | pb:
2561 2568 rel = b"|"
2562 2569 ui.write(_(b"a: %s\n") % pa)
2563 2570 ui.write(_(b"b: %s\n") % pb)
2564 2571 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2565 2572 ui.write(
2566 2573 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2567 2574 % (
2568 2575 abs(pa._depth - pb._depth),
2569 2576 pvec._hamming(pa._vec, pb._vec),
2570 2577 pa.distance(pb),
2571 2578 rel,
2572 2579 )
2573 2580 )
2574 2581
2575 2582
2576 2583 @command(
2577 2584 b'debugrebuilddirstate|debugrebuildstate',
2578 2585 [
2579 2586 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2580 2587 (
2581 2588 b'',
2582 2589 b'minimal',
2583 2590 None,
2584 2591 _(
2585 2592 b'only rebuild files that are inconsistent with '
2586 2593 b'the working copy parent'
2587 2594 ),
2588 2595 ),
2589 2596 ],
2590 2597 _(b'[-r REV]'),
2591 2598 )
2592 2599 def debugrebuilddirstate(ui, repo, rev, **opts):
2593 2600 """rebuild the dirstate as it would look like for the given revision
2594 2601
2595 2602 If no revision is specified the first current parent will be used.
2596 2603
2597 2604 The dirstate will be set to the files of the given revision.
2598 2605 The actual working directory content or existing dirstate
2599 2606 information such as adds or removes is not considered.
2600 2607
2601 2608 ``minimal`` will only rebuild the dirstate status for files that claim to be
2602 2609 tracked but are not in the parent manifest, or that exist in the parent
2603 2610 manifest but are not in the dirstate. It will not change adds, removes, or
2604 2611 modified files that are in the working copy parent.
2605 2612
2606 2613 One use of this command is to make the next :hg:`status` invocation
2607 2614 check the actual file content.
2608 2615 """
2609 2616 ctx = scmutil.revsingle(repo, rev)
2610 2617 with repo.wlock():
2611 2618 dirstate = repo.dirstate
2612 2619 changedfiles = None
2613 2620 # See command doc for what minimal does.
2614 2621 if opts.get('minimal'):
2615 2622 manifestfiles = set(ctx.manifest().keys())
2616 2623 dirstatefiles = set(dirstate)
2617 2624 manifestonly = manifestfiles - dirstatefiles
2618 2625 dsonly = dirstatefiles - manifestfiles
2619 2626 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2620 2627 changedfiles = manifestonly | dsnotadded
2621 2628
2622 2629 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2623 2630
2624 2631
2625 2632 @command(b'debugrebuildfncache', [], b'')
2626 2633 def debugrebuildfncache(ui, repo):
2627 2634 """rebuild the fncache file"""
2628 2635 repair.rebuildfncache(ui, repo)
2629 2636
2630 2637
2631 2638 @command(
2632 2639 b'debugrename',
2633 2640 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2634 2641 _(b'[-r REV] [FILE]...'),
2635 2642 )
2636 2643 def debugrename(ui, repo, *pats, **opts):
2637 2644 """dump rename information"""
2638 2645
2639 2646 opts = pycompat.byteskwargs(opts)
2640 2647 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2641 2648 m = scmutil.match(ctx, pats, opts)
2642 2649 for abs in ctx.walk(m):
2643 2650 fctx = ctx[abs]
2644 2651 o = fctx.filelog().renamed(fctx.filenode())
2645 2652 rel = repo.pathto(abs)
2646 2653 if o:
2647 2654 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2648 2655 else:
2649 2656 ui.write(_(b"%s not renamed\n") % rel)
2650 2657
2651 2658
2652 2659 @command(
2653 2660 b'debugrevlog',
2654 2661 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2655 2662 _(b'-c|-m|FILE'),
2656 2663 optionalrepo=True,
2657 2664 )
2658 2665 def debugrevlog(ui, repo, file_=None, **opts):
2659 2666 """show data and statistics about a revlog"""
2660 2667 opts = pycompat.byteskwargs(opts)
2661 2668 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2662 2669
2663 2670 if opts.get(b"dump"):
2664 2671 numrevs = len(r)
2665 2672 ui.write(
2666 2673 (
2667 2674 b"# rev p1rev p2rev start end deltastart base p1 p2"
2668 2675 b" rawsize totalsize compression heads chainlen\n"
2669 2676 )
2670 2677 )
2671 2678 ts = 0
2672 2679 heads = set()
2673 2680
2674 2681 for rev in pycompat.xrange(numrevs):
2675 2682 dbase = r.deltaparent(rev)
2676 2683 if dbase == -1:
2677 2684 dbase = rev
2678 2685 cbase = r.chainbase(rev)
2679 2686 clen = r.chainlen(rev)
2680 2687 p1, p2 = r.parentrevs(rev)
2681 2688 rs = r.rawsize(rev)
2682 2689 ts = ts + rs
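# track the heads seen so far: each revision displaces its parents from the set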
2683 2690 heads -= set(r.parentrevs(rev))
2684 2691 heads.add(rev)
2685 2692 try:
2686 2693 compression = ts / r.end(rev)
2687 2694 except ZeroDivisionError:
2688 2695 compression = 0
2689 2696 ui.write(
2690 2697 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2691 2698 b"%11d %5d %8d\n"
2692 2699 % (
2693 2700 rev,
2694 2701 p1,
2695 2702 p2,
2696 2703 r.start(rev),
2697 2704 r.end(rev),
2698 2705 r.start(dbase),
2699 2706 r.start(cbase),
2700 2707 r.start(p1),
2701 2708 r.start(p2),
2702 2709 rs,
2703 2710 ts,
2704 2711 compression,
2705 2712 len(heads),
2706 2713 clen,
2707 2714 )
2708 2715 )
2709 2716 return 0
2710 2717
2711 2718 v = r.version
2712 2719 format = v & 0xFFFF
2713 2720 flags = []
2714 2721 gdelta = False
2715 2722 if v & revlog.FLAG_INLINE_DATA:
2716 2723 flags.append(b'inline')
2717 2724 if v & revlog.FLAG_GENERALDELTA:
2718 2725 gdelta = True
2719 2726 flags.append(b'generaldelta')
2720 2727 if not flags:
2721 2728 flags = [b'(none)']
2722 2729
2723 2730 ### tracks merge vs single parent
2724 2731 nummerges = 0
2725 2732
2726 2733 ### tracks how the deltas are built
2727 2734 # nodelta
2728 2735 numempty = 0
2729 2736 numemptytext = 0
2730 2737 numemptydelta = 0
2731 2738 # full file content
2732 2739 numfull = 0
2733 2740 # intermediate snapshot against a prior snapshot
2734 2741 numsemi = 0
2735 2742 # snapshot count per depth
2736 2743 numsnapdepth = collections.defaultdict(lambda: 0)
2737 2744 # delta against previous revision
2738 2745 numprev = 0
2739 2746 # delta against first or second parent (not prev)
2740 2747 nump1 = 0
2741 2748 nump2 = 0
2742 2749 # delta against neither prev nor parents
2743 2750 numother = 0
2744 2751 # delta against prev that are also first or second parent
2745 2752 # (details of `numprev`)
2746 2753 nump1prev = 0
2747 2754 nump2prev = 0
2748 2755
2749 2756 # data about delta chain of each revs
2750 2757 chainlengths = []
2751 2758 chainbases = []
2752 2759 chainspans = []
2753 2760
2754 2761 # data about each revision
2755 2762 datasize = [None, 0, 0]
2756 2763 fullsize = [None, 0, 0]
2757 2764 semisize = [None, 0, 0]
2758 2765 # snapshot count per depth
2759 2766 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2760 2767 deltasize = [None, 0, 0]
2761 2768 chunktypecounts = {}
2762 2769 chunktypesizes = {}
2763 2770
2764 2771 def addsize(size, l):
2765 2772 if l[0] is None or size < l[0]:
2766 2773 l[0] = size
2767 2774 if size > l[1]:
2768 2775 l[1] = size
2769 2776 l[2] += size
2770 2777
2771 2778 numrevs = len(r)
2772 2779 for rev in pycompat.xrange(numrevs):
2773 2780 p1, p2 = r.parentrevs(rev)
2774 2781 delta = r.deltaparent(rev)
2775 2782 if format > 0:
2776 2783 addsize(r.rawsize(rev), datasize)
2777 2784 if p2 != nullrev:
2778 2785 nummerges += 1
2779 2786 size = r.length(rev)
2780 2787 if delta == nullrev:
2781 2788 chainlengths.append(0)
2782 2789 chainbases.append(r.start(rev))
2783 2790 chainspans.append(size)
2784 2791 if size == 0:
2785 2792 numempty += 1
2786 2793 numemptytext += 1
2787 2794 else:
2788 2795 numfull += 1
2789 2796 numsnapdepth[0] += 1
2790 2797 addsize(size, fullsize)
2791 2798 addsize(size, snapsizedepth[0])
2792 2799 else:
2793 2800 chainlengths.append(chainlengths[delta] + 1)
2794 2801 baseaddr = chainbases[delta]
2795 2802 revaddr = r.start(rev)
2796 2803 chainbases.append(baseaddr)
2797 2804 chainspans.append((revaddr - baseaddr) + size)
2798 2805 if size == 0:
2799 2806 numempty += 1
2800 2807 numemptydelta += 1
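# intermediate snapshot: a delta against another snapshot, counted per depth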
2801 2808 elif r.issnapshot(rev):
2802 2809 addsize(size, semisize)
2803 2810 numsemi += 1
2804 2811 depth = r.snapshotdepth(rev)
2805 2812 numsnapdepth[depth] += 1
2806 2813 addsize(size, snapsizedepth[depth])
2807 2814 else:
2808 2815 addsize(size, deltasize)
2809 2816 if delta == rev - 1:
2810 2817 numprev += 1
2811 2818 if delta == p1:
2812 2819 nump1prev += 1
2813 2820 elif delta == p2:
2814 2821 nump2prev += 1
2815 2822 elif delta == p1:
2816 2823 nump1 += 1
2817 2824 elif delta == p2:
2818 2825 nump2 += 1
2819 2826 elif delta != nullrev:
2820 2827 numother += 1
2821 2828
2822 2829 # Obtain data on the raw chunks in the revlog.
2823 2830 if util.safehasattr(r, b'_getsegmentforrevs'):
2824 2831 segment = r._getsegmentforrevs(rev, rev)[1]
2825 2832 else:
2826 2833 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2827 2834 if segment:
2828 2835 chunktype = bytes(segment[0:1])
2829 2836 else:
2830 2837 chunktype = b'empty'
2831 2838
2832 2839 if chunktype not in chunktypecounts:
2833 2840 chunktypecounts[chunktype] = 0
2834 2841 chunktypesizes[chunktype] = 0
2835 2842
2836 2843 chunktypecounts[chunktype] += 1
2837 2844 chunktypesizes[chunktype] += size
2838 2845
2839 2846 # Adjust size min value for empty cases
2840 2847 for size in (datasize, fullsize, semisize, deltasize):
2841 2848 if size[0] is None:
2842 2849 size[0] = 0
2843 2850
2844 2851 numdeltas = numrevs - numfull - numempty - numsemi
2845 2852 numoprev = numprev - nump1prev - nump2prev
2846 2853 totalrawsize = datasize[2]
2847 2854 datasize[2] /= numrevs
2848 2855 fulltotal = fullsize[2]
2849 2856 if numfull == 0:
2850 2857 fullsize[2] = 0
2851 2858 else:
2852 2859 fullsize[2] /= numfull
2853 2860 semitotal = semisize[2]
2854 2861 snaptotal = {}
2855 2862 if numsemi > 0:
2856 2863 semisize[2] /= numsemi
2857 2864 for depth in snapsizedepth:
2858 2865 snaptotal[depth] = snapsizedepth[depth][2]
2859 2866 snapsizedepth[depth][2] /= numsnapdepth[depth]
2860 2867
2861 2868 deltatotal = deltasize[2]
2862 2869 if numdeltas > 0:
2863 2870 deltasize[2] /= numdeltas
2864 2871 totalsize = fulltotal + semitotal + deltatotal
2865 2872 avgchainlen = sum(chainlengths) / numrevs
2866 2873 maxchainlen = max(chainlengths)
2867 2874 maxchainspan = max(chainspans)
2868 2875 compratio = 1
2869 2876 if totalsize:
2870 2877 compratio = totalrawsize / totalsize
2871 2878
2872 2879 basedfmtstr = b'%%%dd\n'
2873 2880 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2874 2881
2875 2882 def dfmtstr(max):
2876 2883 return basedfmtstr % len(str(max))
2877 2884
2878 2885 def pcfmtstr(max, padding=0):
2879 2886 return basepcfmtstr % (len(str(max)), b' ' * padding)
2880 2887
2881 2888 def pcfmt(value, total):
2882 2889 if total:
2883 2890 return (value, 100 * float(value) / total)
2884 2891 else:
2885 2892 return value, 100.0
2886 2893
2887 2894 ui.writenoi18n(b'format : %d\n' % format)
2888 2895 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2889 2896
2890 2897 ui.write(b'\n')
2891 2898 fmt = pcfmtstr(totalsize)
2892 2899 fmt2 = dfmtstr(totalsize)
2893 2900 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2894 2901 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2895 2902 ui.writenoi18n(
2896 2903 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2897 2904 )
2898 2905 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2899 2906 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2900 2907 ui.writenoi18n(
2901 2908 b' text : '
2902 2909 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2903 2910 )
2904 2911 ui.writenoi18n(
2905 2912 b' delta : '
2906 2913 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2907 2914 )
2908 2915 ui.writenoi18n(
2909 2916 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2910 2917 )
2911 2918 for depth in sorted(numsnapdepth):
2912 2919 ui.write(
2913 2920 (b' lvl-%-3d : ' % depth)
2914 2921 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2915 2922 )
2916 2923 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2917 2924 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2918 2925 ui.writenoi18n(
2919 2926 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2920 2927 )
2921 2928 for depth in sorted(numsnapdepth):
2922 2929 ui.write(
2923 2930 (b' lvl-%-3d : ' % depth)
2924 2931 + fmt % pcfmt(snaptotal[depth], totalsize)
2925 2932 )
2926 2933 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2927 2934
2928 2935 def fmtchunktype(chunktype):
2929 2936 if chunktype == b'empty':
2930 2937 return b' %s : ' % chunktype
2931 2938 elif chunktype in pycompat.bytestr(string.ascii_letters):
2932 2939 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2933 2940 else:
2934 2941 return b' 0x%s : ' % hex(chunktype)
2935 2942
2936 2943 ui.write(b'\n')
2937 2944 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2938 2945 for chunktype in sorted(chunktypecounts):
2939 2946 ui.write(fmtchunktype(chunktype))
2940 2947 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2941 2948 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2942 2949 for chunktype in sorted(chunktypecounts):
2943 2950 ui.write(fmtchunktype(chunktype))
2944 2951 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2945 2952
2946 2953 ui.write(b'\n')
2947 2954 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2948 2955 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2949 2956 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2950 2957 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2951 2958 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2952 2959
2953 2960 if format > 0:
2954 2961 ui.write(b'\n')
2955 2962 ui.writenoi18n(
2956 2963 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2957 2964 % tuple(datasize)
2958 2965 )
2959 2966 ui.writenoi18n(
2960 2967 b'full revision size (min/max/avg) : %d / %d / %d\n'
2961 2968 % tuple(fullsize)
2962 2969 )
2963 2970 ui.writenoi18n(
2964 2971 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2965 2972 % tuple(semisize)
2966 2973 )
2967 2974 for depth in sorted(snapsizedepth):
2968 2975 if depth == 0:
2969 2976 continue
2970 2977 ui.writenoi18n(
2971 2978 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2972 2979 % ((depth,) + tuple(snapsizedepth[depth]))
2973 2980 )
2974 2981 ui.writenoi18n(
2975 2982 b'delta size (min/max/avg) : %d / %d / %d\n'
2976 2983 % tuple(deltasize)
2977 2984 )
2978 2985
2979 2986 if numdeltas > 0:
2980 2987 ui.write(b'\n')
2981 2988 fmt = pcfmtstr(numdeltas)
2982 2989 fmt2 = pcfmtstr(numdeltas, 4)
2983 2990 ui.writenoi18n(
2984 2991 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2985 2992 )
2986 2993 if numprev > 0:
2987 2994 ui.writenoi18n(
2988 2995 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2989 2996 )
2990 2997 ui.writenoi18n(
2991 2998 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2992 2999 )
2993 3000 ui.writenoi18n(
2994 3001 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2995 3002 )
2996 3003 if gdelta:
2997 3004 ui.writenoi18n(
2998 3005 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2999 3006 )
3000 3007 ui.writenoi18n(
3001 3008 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3002 3009 )
3003 3010 ui.writenoi18n(
3004 3011 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3005 3012 )
3006 3013
3007 3014
3008 3015 @command(
3009 3016 b'debugrevlogindex',
3010 3017 cmdutil.debugrevlogopts
3011 3018 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3012 3019 _(b'[-f FORMAT] -c|-m|FILE'),
3013 3020 optionalrepo=True,
3014 3021 )
3015 3022 def debugrevlogindex(ui, repo, file_=None, **opts):
3016 3023 """dump the contents of a revlog index"""
3017 3024 opts = pycompat.byteskwargs(opts)
3018 3025 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3019 3026 format = opts.get(b'format', 0)
3020 3027 if format not in (0, 1):
3021 3028 raise error.Abort(_(b"unknown format %d") % format)
3022 3029
3023 3030 if ui.debugflag:
3024 3031 shortfn = hex
3025 3032 else:
3026 3033 shortfn = short
3027 3034
3028 3035 # There might not be anything in r, so have a sane default
3029 3036 idlen = 12
3030 3037 for i in r:
3031 3038 idlen = len(shortfn(r.node(i)))
3032 3039 break
3033 3040
3034 3041 if format == 0:
3035 3042 if ui.verbose:
3036 3043 ui.writenoi18n(
3037 3044 b" rev offset length linkrev %s %s p2\n"
3038 3045 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3039 3046 )
3040 3047 else:
3041 3048 ui.writenoi18n(
3042 3049 b" rev linkrev %s %s p2\n"
3043 3050 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3044 3051 )
3045 3052 elif format == 1:
3046 3053 if ui.verbose:
3047 3054 ui.writenoi18n(
3048 3055 (
3049 3056 b" rev flag offset length size link p1"
3050 3057 b" p2 %s\n"
3051 3058 )
3052 3059 % b"nodeid".rjust(idlen)
3053 3060 )
3054 3061 else:
3055 3062 ui.writenoi18n(
3056 3063 b" rev flag size link p1 p2 %s\n"
3057 3064 % b"nodeid".rjust(idlen)
3058 3065 )
3059 3066
3060 3067 for i in r:
3061 3068 node = r.node(i)
3062 3069 if format == 0:
3063 3070 try:
3064 3071 pp = r.parents(node)
3065 3072 except Exception:
3066 3073 pp = [nullid, nullid]
3067 3074 if ui.verbose:
3068 3075 ui.write(
3069 3076 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3070 3077 % (
3071 3078 i,
3072 3079 r.start(i),
3073 3080 r.length(i),
3074 3081 r.linkrev(i),
3075 3082 shortfn(node),
3076 3083 shortfn(pp[0]),
3077 3084 shortfn(pp[1]),
3078 3085 )
3079 3086 )
3080 3087 else:
3081 3088 ui.write(
3082 3089 b"% 6d % 7d %s %s %s\n"
3083 3090 % (
3084 3091 i,
3085 3092 r.linkrev(i),
3086 3093 shortfn(node),
3087 3094 shortfn(pp[0]),
3088 3095 shortfn(pp[1]),
3089 3096 )
3090 3097 )
3091 3098 elif format == 1:
3092 3099 pr = r.parentrevs(i)
3093 3100 if ui.verbose:
3094 3101 ui.write(
3095 3102 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3096 3103 % (
3097 3104 i,
3098 3105 r.flags(i),
3099 3106 r.start(i),
3100 3107 r.length(i),
3101 3108 r.rawsize(i),
3102 3109 r.linkrev(i),
3103 3110 pr[0],
3104 3111 pr[1],
3105 3112 shortfn(node),
3106 3113 )
3107 3114 )
3108 3115 else:
3109 3116 ui.write(
3110 3117 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3111 3118 % (
3112 3119 i,
3113 3120 r.flags(i),
3114 3121 r.rawsize(i),
3115 3122 r.linkrev(i),
3116 3123 pr[0],
3117 3124 pr[1],
3118 3125 shortfn(node),
3119 3126 )
3120 3127 )
3121 3128
3122 3129
3123 3130 @command(
3124 3131 b'debugrevspec',
3125 3132 [
3126 3133 (
3127 3134 b'',
3128 3135 b'optimize',
3129 3136 None,
3130 3137 _(b'print parsed tree after optimizing (DEPRECATED)'),
3131 3138 ),
3132 3139 (
3133 3140 b'',
3134 3141 b'show-revs',
3135 3142 True,
3136 3143 _(b'print list of result revisions (default)'),
3137 3144 ),
3138 3145 (
3139 3146 b's',
3140 3147 b'show-set',
3141 3148 None,
3142 3149 _(b'print internal representation of result set'),
3143 3150 ),
3144 3151 (
3145 3152 b'p',
3146 3153 b'show-stage',
3147 3154 [],
3148 3155 _(b'print parsed tree at the given stage'),
3149 3156 _(b'NAME'),
3150 3157 ),
3151 3158 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3152 3159 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3153 3160 ],
3154 3161 b'REVSPEC',
3155 3162 )
3156 3163 def debugrevspec(ui, repo, expr, **opts):
3157 3164 """parse and apply a revision specification
3158 3165
3159 3166 Use -p/--show-stage option to print the parsed tree at the given stages.
3160 3167 Use -p all to print the tree at every stage.
3161 3168
3162 3169 Use --no-show-revs option with -s or -p to print only the set
3163 3170 representation or the parsed tree respectively.
3164 3171
3165 3172 Use --verify-optimized to compare the optimized result with the unoptimized
3166 3173 one. Returns 1 if the optimized result differs.
3167 3174 """
3168 3175 opts = pycompat.byteskwargs(opts)
3169 3176 aliases = ui.configitems(b'revsetalias')
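# each stage transforms the tree produced by the previous one;
# -p/--show-stage can print the tree after any of them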
3170 3177 stages = [
3171 3178 (b'parsed', lambda tree: tree),
3172 3179 (
3173 3180 b'expanded',
3174 3181 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3175 3182 ),
3176 3183 (b'concatenated', revsetlang.foldconcat),
3177 3184 (b'analyzed', revsetlang.analyze),
3178 3185 (b'optimized', revsetlang.optimize),
3179 3186 ]
3180 3187 if opts[b'no_optimized']:
3181 3188 stages = stages[:-1]
3182 3189 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3183 3190 raise error.Abort(
3184 3191 _(b'cannot use --verify-optimized with --no-optimized')
3185 3192 )
3186 3193 stagenames = {n for n, f in stages}
3187 3194
3188 3195 showalways = set()
3189 3196 showchanged = set()
3190 3197 if ui.verbose and not opts[b'show_stage']:
3191 3198 # show parsed tree by --verbose (deprecated)
3192 3199 showalways.add(b'parsed')
3193 3200 showchanged.update([b'expanded', b'concatenated'])
3194 3201 if opts[b'optimize']:
3195 3202 showalways.add(b'optimized')
3196 3203 if opts[b'show_stage'] and opts[b'optimize']:
3197 3204 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3198 3205 if opts[b'show_stage'] == [b'all']:
3199 3206 showalways.update(stagenames)
3200 3207 else:
3201 3208 for n in opts[b'show_stage']:
3202 3209 if n not in stagenames:
3203 3210 raise error.Abort(_(b'invalid stage name: %s') % n)
3204 3211 showalways.update(opts[b'show_stage'])
3205 3212
3206 3213 treebystage = {}
3207 3214 printedtree = None
3208 3215 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3209 3216 for n, f in stages:
3210 3217 treebystage[n] = tree = f(tree)
3211 3218 if n in showalways or (n in showchanged and tree != printedtree):
3212 3219 if opts[b'show_stage'] or n != b'parsed':
3213 3220 ui.write(b"* %s:\n" % n)
3214 3221 ui.write(revsetlang.prettyformat(tree), b"\n")
3215 3222 printedtree = tree
3216 3223
3217 3224 if opts[b'verify_optimized']:
3218 3225 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3219 3226 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3220 3227 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3221 3228 ui.writenoi18n(
3222 3229 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3223 3230 )
3224 3231 ui.writenoi18n(
3225 3232 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3226 3233 )
3227 3234 arevs = list(arevs)
3228 3235 brevs = list(brevs)
3229 3236 if arevs == brevs:
3230 3237 return 0
3231 3238 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3232 3239 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3233 3240 sm = difflib.SequenceMatcher(None, arevs, brevs)
3234 3241 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3235 3242 if tag in ('delete', 'replace'):
3236 3243 for c in arevs[alo:ahi]:
3237 3244 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3238 3245 if tag in ('insert', 'replace'):
3239 3246 for c in brevs[blo:bhi]:
3240 3247 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3241 3248 if tag == 'equal':
3242 3249 for c in arevs[alo:ahi]:
3243 3250 ui.write(b' %d\n' % c)
3244 3251 return 1
3245 3252
3246 3253 func = revset.makematcher(tree)
3247 3254 revs = func(repo)
3248 3255 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3249 3256 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3250 3257 if not opts[b'show_revs']:
3251 3258 return
3252 3259 for c in revs:
3253 3260 ui.write(b"%d\n" % c)
3254 3261
3255 3262
3256 3263 @command(
3257 3264 b'debugserve',
3258 3265 [
3259 3266 (
3260 3267 b'',
3261 3268 b'sshstdio',
3262 3269 False,
3263 3270 _(b'run an SSH server bound to process handles'),
3264 3271 ),
3265 3272 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3266 3273 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3267 3274 ],
3268 3275 b'',
3269 3276 )
3270 3277 def debugserve(ui, repo, **opts):
3271 3278 """run a server with advanced settings
3272 3279
3273 3280 This command is similar to :hg:`serve`. It exists partially as a
3274 3281 workaround to the fact that ``hg serve --stdio`` must have specific
3275 3282 workaround for the fact that ``hg serve --stdio`` must have specific
3276 3283 """
3277 3284 opts = pycompat.byteskwargs(opts)
3278 3285
3279 3286 if not opts[b'sshstdio']:
3280 3287 raise error.Abort(_(b'only --sshstdio is currently supported'))
3281 3288
3282 3289 logfh = None
3283 3290
3284 3291 if opts[b'logiofd'] and opts[b'logiofile']:
3285 3292 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3286 3293
3287 3294 if opts[b'logiofd']:
3288 3295 # Ideally we would be line buffered. But line buffering in binary
3289 3296 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3290 3297 # buffering could have performance impacts. But since this isn't
3291 3298 # performance critical code, it should be fine.
3292 3299 try:
3293 3300 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3294 3301 except OSError as e:
3295 3302 if e.errno != errno.ESPIPE:
3296 3303 raise
3297 3304 # can't seek a pipe, so `ab` mode fails on py3
3298 3305 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3299 3306 elif opts[b'logiofile']:
3300 3307 logfh = open(opts[b'logiofile'], b'ab', 0)
3301 3308
3302 3309 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3303 3310 s.serve_forever()
3304 3311
3305 3312
3306 3313 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3307 3314 def debugsetparents(ui, repo, rev1, rev2=None):
3308 3315 """manually set the parents of the current working directory
3309 3316
3310 3317 This is useful for writing repository conversion tools, but should
3311 3318 be used with care. For example, neither the working directory nor the
3312 3319 dirstate is updated, so file status may be incorrect after running this
3313 3320 command.
3314 3321
3315 3322 Returns 0 on success.
3316 3323 """
3317 3324
3318 3325 node1 = scmutil.revsingle(repo, rev1).node()
3319 3326 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3320 3327
3321 3328 with repo.wlock():
3322 3329 repo.setparents(node1, node2)
3323 3330
3324 3331
3325 3332 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3326 3333 def debugsidedata(ui, repo, file_, rev=None, **opts):
3327 3334 """dump the side data for a cl/manifest/file revision
3328 3335
3329 3336 Use --verbose to dump the sidedata content."""
3330 3337 opts = pycompat.byteskwargs(opts)
3331 3338 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3332 3339 if rev is not None:
3333 3340 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3334 3341 file_, rev = None, file_
3335 3342 elif rev is None:
3336 3343 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3337 3344 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3338 3345 r = getattr(r, '_revlog', r)
3339 3346 try:
3340 3347 sidedata = r.sidedata(r.lookup(rev))
3341 3348 except KeyError:
3342 3349 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3343 3350 if sidedata:
3344 3351 sidedata = list(sidedata.items())
3345 3352 sidedata.sort()
3346 3353 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3347 3354 for key, value in sidedata:
3348 3355 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3349 3356 if ui.verbose:
3350 3357 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3351 3358
3352 3359
3353 3360 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3354 3361 def debugssl(ui, repo, source=None, **opts):
3355 3362 '''test a secure connection to a server
3356 3363
3357 3364 This builds the certificate chain for the server on Windows, installing the
3358 3365 missing intermediates and trusted root via Windows Update if necessary. It
3359 3366 does nothing on other platforms.
3360 3367
3361 3368 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3362 3369 that server is used. See :hg:`help urls` for more information.
3363 3370
3364 3371 If the update succeeds, retry the original operation. Otherwise, the cause
3365 3372 of the SSL error is likely another issue.
3366 3373 '''
3367 3374 if not pycompat.iswindows:
3368 3375 raise error.Abort(
3369 3376 _(b'certificate chain building is only possible on Windows')
3370 3377 )
3371 3378
3372 3379 if not source:
3373 3380 if not repo:
3374 3381 raise error.Abort(
3375 3382 _(
3376 3383 b"there is no Mercurial repository here, and no "
3377 3384 b"server specified"
3378 3385 )
3379 3386 )
3380 3387 source = b"default"
3381 3388
3382 3389 source, branches = hg.parseurl(ui.expandpath(source))
3383 3390 url = util.url(source)
3384 3391
3385 3392 defaultport = {b'https': 443, b'ssh': 22}
3386 3393 if url.scheme in defaultport:
3387 3394 try:
3388 3395 addr = (url.host, int(url.port or defaultport[url.scheme]))
3389 3396 except ValueError:
3390 3397 raise error.Abort(_(b"malformed port number in URL"))
3391 3398 else:
3392 3399 raise error.Abort(_(b"only https and ssh connections are supported"))
3393 3400
3394 3401 from . import win32
3395 3402
3396 3403 s = ssl.wrap_socket(
3397 3404 socket.socket(),
3398 3405 ssl_version=ssl.PROTOCOL_TLS,
3399 3406 cert_reqs=ssl.CERT_NONE,
3400 3407 ca_certs=None,
3401 3408 )
3402 3409
3403 3410 try:
3404 3411 s.connect(addr)
3405 3412 cert = s.getpeercert(True)
3406 3413
3407 3414 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3408 3415
3409 3416 complete = win32.checkcertificatechain(cert, build=False)
3410 3417
3411 3418 if not complete:
3412 3419 ui.status(_(b'certificate chain is incomplete, updating... '))
3413 3420
3414 3421 if not win32.checkcertificatechain(cert):
3415 3422 ui.status(_(b'failed.\n'))
3416 3423 else:
3417 3424 ui.status(_(b'done.\n'))
3418 3425 else:
3419 3426 ui.status(_(b'full certificate chain is available\n'))
3420 3427 finally:
3421 3428 s.close()
3422 3429
3423 3430
3424 3431 @command(
3425 3432 b"debugbackupbundle",
3426 3433 [
3427 3434 (
3428 3435 b"",
3429 3436 b"recover",
3430 3437 b"",
3431 3438 b"brings the specified changeset back into the repository",
3432 3439 )
3433 3440 ]
3434 3441 + cmdutil.logopts,
3435 3442 _(b"hg debugbackupbundle [--recover HASH]"),
3436 3443 )
3437 3444 def debugbackupbundle(ui, repo, *pats, **opts):
3438 3445 """lists the changesets available in backup bundles
3439 3446
3440 3447 Without any arguments, this command prints a list of the changesets in each
3441 3448 backup bundle.
3442 3449
3443 3450 --recover takes a changeset hash and unbundles the first bundle that
3444 3451 contains that hash, which puts that changeset back in your repository.
3445 3452
3446 3453 --verbose will print the entire commit message and the bundle path for that
3447 3454 backup.
3448 3455 """
3449 3456 backups = list(
3450 3457 filter(
3451 3458 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3452 3459 )
3453 3460 )
3454 3461 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3455 3462
3456 3463 opts = pycompat.byteskwargs(opts)
3457 3464 opts[b"bundle"] = b""
3458 3465 opts[b"force"] = None
3459 3466 limit = logcmdutil.getlimit(opts)
3460 3467
3461 3468 def display(other, chlist, displayer):
3462 3469 if opts.get(b"newest_first"):
3463 3470 chlist.reverse()
3464 3471 count = 0
3465 3472 for n in chlist:
3466 3473 if limit is not None and count >= limit:
3467 3474 break
3468 3475 parents = [True for p in other.changelog.parents(n) if p != nullid]
3469 3476 if opts.get(b"no_merges") and len(parents) == 2:
3470 3477 continue
3471 3478 count += 1
3472 3479 displayer.show(other[n])
3473 3480
3474 3481 recovernode = opts.get(b"recover")
3475 3482 if recovernode:
3476 3483 if scmutil.isrevsymbol(repo, recovernode):
3477 3484 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3478 3485 return
3479 3486 elif backups:
3480 3487 msg = _(
3481 3488 b"Recover changesets using: hg debugbackupbundle --recover "
3482 3489 b"<changeset hash>\n\nAvailable backup changesets:"
3483 3490 )
3484 3491 ui.status(msg, label=b"status.removed")
3485 3492 else:
3486 3493 ui.status(_(b"no backup changesets found\n"))
3487 3494 return
3488 3495
3489 3496 for backup in backups:
3490 3497 # Much of this is copied from the hg incoming logic
3491 3498 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3492 3499 source, branches = hg.parseurl(source, opts.get(b"branch"))
3493 3500 try:
3494 3501 other = hg.peer(repo, opts, source)
3495 3502 except error.LookupError as ex:
3496 3503 msg = _(b"\nwarning: unable to open bundle %s") % source
3497 3504 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3498 3505 ui.warn(msg, hint=hint)
3499 3506 continue
3500 3507 revs, checkout = hg.addbranchrevs(
3501 3508 repo, other, branches, opts.get(b"rev")
3502 3509 )
3503 3510
3504 3511 if revs:
3505 3512 revs = [other.lookup(rev) for rev in revs]
3506 3513
3507 3514 quiet = ui.quiet
3508 3515 try:
3509 3516 ui.quiet = True
3510 3517 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3511 3518 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3512 3519 )
3513 3520 except error.LookupError:
3514 3521 continue
3515 3522 finally:
3516 3523 ui.quiet = quiet
3517 3524
3518 3525 try:
3519 3526 if not chlist:
3520 3527 continue
3521 3528 if recovernode:
3522 3529 with repo.lock(), repo.transaction(b"unbundle") as tr:
3523 3530 if scmutil.isrevsymbol(other, recovernode):
3524 3531 ui.status(_(b"Unbundling %s\n") % (recovernode))
3525 3532 f = hg.openpath(ui, source)
3526 3533 gen = exchange.readbundle(ui, f, source)
3527 3534 if isinstance(gen, bundle2.unbundle20):
3528 3535 bundle2.applybundle(
3529 3536 repo,
3530 3537 gen,
3531 3538 tr,
3532 3539 source=b"unbundle",
3533 3540 url=b"bundle:" + source,
3534 3541 )
3535 3542 else:
3536 3543 gen.apply(repo, b"unbundle", b"bundle:" + source)
3537 3544 break
3538 3545 else:
3539 3546 backupdate = encoding.strtolocal(
3540 3547 time.strftime(
3541 3548 "%a %H:%M, %Y-%m-%d",
3542 3549 time.localtime(os.path.getmtime(source)),
3543 3550 )
3544 3551 )
3545 3552 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3546 3553 if ui.verbose:
3547 3554 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3548 3555 else:
3549 3556 opts[
3550 3557 b"template"
3551 3558 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3552 3559 displayer = logcmdutil.changesetdisplayer(
3553 3560 ui, other, opts, False
3554 3561 )
3555 3562 display(other, chlist, displayer)
3556 3563 displayer.close()
3557 3564 finally:
3558 3565 cleanupfn()
3559 3566
3560 3567
3561 3568 @command(
3562 3569 b'debugsub',
3563 3570 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3564 3571 _(b'[-r REV] [REV]'),
3565 3572 )
3566 3573 def debugsub(ui, repo, rev=None):
3567 3574 ctx = scmutil.revsingle(repo, rev, None)
3568 3575 for k, v in sorted(ctx.substate.items()):
3569 3576 ui.writenoi18n(b'path %s\n' % k)
3570 3577 ui.writenoi18n(b' source %s\n' % v[0])
3571 3578 ui.writenoi18n(b' revision %s\n' % v[1])
3572 3579
3573 3580
3574 3581 @command(
3575 3582 b'debugsuccessorssets',
3576 3583 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3577 3584 _(b'[REV]'),
3578 3585 )
3579 3586 def debugsuccessorssets(ui, repo, *revs, **opts):
3580 3587 """show set of successors for revision
3581 3588
3582 3589 A successors set of changeset A is a consistent group of revisions that
3583 3590 succeed A. It contains non-obsolete changesets only unless the closest
3584 3591 successors sets are requested (``--closest``).
3585 3592
3586 3593 In most cases a changeset A has a single successors set containing a single
3587 3594 successor (changeset A replaced by A').
3588 3595
3589 3596 A changeset that is made obsolete with no successors is called "pruned".
3590 3597 Such changesets have no successors sets at all.
3591 3598
3592 3599 A changeset that has been "split" will have a successors set containing
3593 3600 more than one successor.
3594 3601
3595 3602 A changeset that has been rewritten in multiple different ways is called
3596 3603 "divergent". Such changesets have multiple successor sets (each of which
3597 3604 may also be split, i.e. have multiple successors).
3598 3605
3599 3606 Results are displayed as follows::
3600 3607
3601 3608 <rev1>
3602 3609 <successors-1A>
3603 3610 <rev2>
3604 3611 <successors-2A>
3605 3612 <successors-2B1> <successors-2B2> <successors-2B3>
3606 3613
3607 3614 Here rev2 has two possible (i.e. divergent) successors sets. The first
3608 3615 holds one element, whereas the second holds three (i.e. the changeset has
3609 3616 been split).
3610 3617 """
3611 3618 # passed to successorssets caching computation from one call to another
3612 3619 cache = {}
3613 3620 ctx2str = bytes
3614 3621 node2str = short
3615 3622 for rev in scmutil.revrange(repo, revs):
3616 3623 ctx = repo[rev]
3617 3624 ui.write(b'%s\n' % ctx2str(ctx))
3618 3625 for succsset in obsutil.successorssets(
3619 3626 repo, ctx.node(), closest=opts['closest'], cache=cache
3620 3627 ):
3621 3628 if succsset:
3622 3629 ui.write(b' ')
3623 3630 ui.write(node2str(succsset[0]))
3624 3631 for node in succsset[1:]:
3625 3632 ui.write(b' ')
3626 3633 ui.write(node2str(node))
3627 3634 ui.write(b'\n')
3628 3635
3629 3636
3630 3637 @command(b'debugtagscache', [])
3631 3638 def debugtagscache(ui, repo):
3632 3639 """display the contents of .hg/cache/hgtagsfnodes1"""
3633 3640 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3634 3641 for r in repo:
3635 3642 node = repo[r].node()
3636 3643 tagsnode = cache.getfnode(node, computemissing=False)
3637 3644 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3638 3645 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3639 3646
3640 3647
3641 3648 @command(
3642 3649 b'debugtemplate',
3643 3650 [
3644 3651 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3645 3652 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3646 3653 ],
3647 3654 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3648 3655 optionalrepo=True,
3649 3656 )
3650 3657 def debugtemplate(ui, repo, tmpl, **opts):
3651 3658 """parse and apply a template
3652 3659
3653 3660 If -r/--rev is given, the template is processed as a log template and
3654 3661 applied to the given changesets. Otherwise, it is processed as a generic
3655 3662 template.
3656 3663
3657 3664 Use --verbose to print the parsed tree.
3658 3665 """
3659 3666 revs = None
3660 3667 if opts['rev']:
3661 3668 if repo is None:
3662 3669 raise error.RepoError(
3663 3670 _(b'there is no Mercurial repository here (.hg not found)')
3664 3671 )
3665 3672 revs = scmutil.revrange(repo, opts['rev'])
3666 3673
3667 3674 props = {}
3668 3675 for d in opts['define']:
3669 3676 try:
3670 3677 k, v = (e.strip() for e in d.split(b'=', 1))
3671 3678 if not k or k == b'ui':
3672 3679 raise ValueError
3673 3680 props[k] = v
3674 3681 except ValueError:
3675 3682 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3676 3683
3677 3684 if ui.verbose:
3678 3685 aliases = ui.configitems(b'templatealias')
3679 3686 tree = templater.parse(tmpl)
3680 3687 ui.note(templater.prettyformat(tree), b'\n')
3681 3688 newtree = templater.expandaliases(tree, aliases)
3682 3689 if newtree != tree:
3683 3690 ui.notenoi18n(
3684 3691 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3685 3692 )
3686 3693
3687 3694 if revs is None:
3688 3695 tres = formatter.templateresources(ui, repo)
3689 3696 t = formatter.maketemplater(ui, tmpl, resources=tres)
3690 3697 if ui.verbose:
3691 3698 kwds, funcs = t.symbolsuseddefault()
3692 3699 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3693 3700 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3694 3701 ui.write(t.renderdefault(props))
3695 3702 else:
3696 3703 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3697 3704 if ui.verbose:
3698 3705 kwds, funcs = displayer.t.symbolsuseddefault()
3699 3706 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3700 3707 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3701 3708 for r in revs:
3702 3709 displayer.show(repo[r], **pycompat.strkwargs(props))
3703 3710 displayer.close()
3704 3711
3705 3712
3706 3713 @command(
3707 3714 b'debuguigetpass',
3708 3715 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3709 3716 _(b'[-p TEXT]'),
3710 3717 norepo=True,
3711 3718 )
3712 3719 def debuguigetpass(ui, prompt=b''):
3713 3720 """show prompt to type password"""
3714 3721 r = ui.getpass(prompt)
3715 3722 ui.writenoi18n(b'response: %s\n' % r)
3716 3723
3717 3724
3718 3725 @command(
3719 3726 b'debuguiprompt',
3720 3727 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3721 3728 _(b'[-p TEXT]'),
3722 3729 norepo=True,
3723 3730 )
3724 3731 def debuguiprompt(ui, prompt=b''):
3725 3732 """show plain prompt"""
3726 3733 r = ui.prompt(prompt)
3727 3734 ui.writenoi18n(b'response: %s\n' % r)
3728 3735
3729 3736
3730 3737 @command(b'debugupdatecaches', [])
3731 3738 def debugupdatecaches(ui, repo, *pats, **opts):
3732 3739 """warm all known caches in the repository"""
3733 3740 with repo.wlock(), repo.lock():
3734 3741 repo.updatecaches(full=True)
3735 3742
3736 3743
3737 3744 @command(
3738 3745 b'debugupgraderepo',
3739 3746 [
3740 3747 (
3741 3748 b'o',
3742 3749 b'optimize',
3743 3750 [],
3744 3751 _(b'extra optimization to perform'),
3745 3752 _(b'NAME'),
3746 3753 ),
3747 3754 (b'', b'run', False, _(b'performs an upgrade')),
3748 3755 (b'', b'backup', True, _(b'keep the old repository content around')),
3749 3756 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3750 3757 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3751 3758 ],
3752 3759 )
3753 3760 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3754 3761 """upgrade a repository to use different features
3755 3762
3756 3763 If no arguments are specified, the repository is evaluated for upgrade
3757 3764 and a list of problems and potential optimizations is printed.
3758 3765
3759 3766 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3760 3767 can be influenced via additional arguments. More details will be provided
3761 3768 by the command output when run without ``--run``.
3762 3769
3763 3770 During the upgrade, the repository will be locked and no writes will be
3764 3771 allowed.
3765 3772
3766 3773 At the end of the upgrade, the repository may not be readable while new
3767 3774 repository data is swapped in. This window will be as long as it takes to
3768 3775 rename some directories inside the ``.hg`` directory. On most machines, this
3769 3776 should complete almost instantaneously and the chances of a consumer being
3770 3777 unable to access the repository should be low.
3771 3778
3772 3779 By default, all revlogs will be upgraded. You can restrict this using flags
3773 3780 such as `--manifest`:
3774 3781
3775 3782 * `--manifest`: only optimize the manifest
3776 3783 * `--no-manifest`: optimize all revlogs but the manifest
3777 3784 * `--changelog`: optimize the changelog only
3778 3785 * `--no-changelog --no-manifest`: optimize filelogs only
3779 3786 """
3780 3787 return upgrade.upgraderepo(
3781 3788 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3782 3789 )
3783 3790
3784 3791
3785 3792 @command(
3786 3793 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3787 3794 )
3788 3795 def debugwalk(ui, repo, *pats, **opts):
3789 3796 """show how files match on given patterns"""
3790 3797 opts = pycompat.byteskwargs(opts)
3791 3798 m = scmutil.match(repo[None], pats, opts)
3792 3799 if ui.verbose:
3793 3800 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3794 3801 items = list(repo[None].walk(m))
3795 3802 if not items:
3796 3803 return
3797 3804 f = lambda fn: fn
3798 3805 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3799 3806 f = lambda fn: util.normpath(fn)
3800 3807 fmt = b'f %%-%ds %%-%ds %%s' % (
3801 3808 max([len(abs) for abs in items]),
3802 3809 max([len(repo.pathto(abs)) for abs in items]),
3803 3810 )
3804 3811 for abs in items:
3805 3812 line = fmt % (
3806 3813 abs,
3807 3814 f(repo.pathto(abs)),
3808 3815 m.exact(abs) and b'exact' or b'',
3809 3816 )
3810 3817 ui.write(b"%s\n" % line.rstrip())
3811 3818
3812 3819
3813 3820 @command(b'debugwhyunstable', [], _(b'REV'))
3814 3821 def debugwhyunstable(ui, repo, rev):
3815 3822 """explain instabilities of a changeset"""
3816 3823 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3817 3824 dnodes = b''
3818 3825 if entry.get(b'divergentnodes'):
3819 3826 dnodes = (
3820 3827 b' '.join(
3821 3828 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3822 3829 for ctx in entry[b'divergentnodes']
3823 3830 )
3824 3831 + b' '
3825 3832 )
3826 3833 ui.write(
3827 3834 b'%s: %s%s %s\n'
3828 3835 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3829 3836 )
3830 3837
3831 3838
3832 3839 @command(
3833 3840 b'debugwireargs',
3834 3841 [
3835 3842 (b'', b'three', b'', b'three'),
3836 3843 (b'', b'four', b'', b'four'),
3837 3844 (b'', b'five', b'', b'five'),
3838 3845 ]
3839 3846 + cmdutil.remoteopts,
3840 3847 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3841 3848 norepo=True,
3842 3849 )
3843 3850 def debugwireargs(ui, repopath, *vals, **opts):
3844 3851 opts = pycompat.byteskwargs(opts)
3845 3852 repo = hg.peer(ui, opts, repopath)
3846 3853 for opt in cmdutil.remoteopts:
3847 3854 del opts[opt[1]]
3848 3855 args = {}
3849 3856 for k, v in pycompat.iteritems(opts):
3850 3857 if v:
3851 3858 args[k] = v
3852 3859 args = pycompat.strkwargs(args)
3853 3860 # run twice to check that we don't mess up the stream for the next command
3854 3861 res1 = repo.debugwireargs(*vals, **args)
3855 3862 res2 = repo.debugwireargs(*vals, **args)
3856 3863 ui.write(b"%s\n" % res1)
3857 3864 if res1 != res2:
3858 3865 ui.warn(b"%s\n" % res2)
3859 3866
3860 3867
3861 3868 def _parsewirelangblocks(fh):
3862 3869 activeaction = None
3863 3870 blocklines = []
3864 3871 lastindent = 0
3865 3872
3866 3873 for line in fh:
3867 3874 line = line.rstrip()
3868 3875 if not line:
3869 3876 continue
3870 3877
3871 3878 if line.startswith(b'#'):
3872 3879 continue
3873 3880
3874 3881 if not line.startswith(b' '):
3875 3882 # New block. Flush previous one.
3876 3883 if activeaction:
3877 3884 yield activeaction, blocklines
3878 3885
3879 3886 activeaction = line
3880 3887 blocklines = []
3881 3888 lastindent = 0
3882 3889 continue
3883 3890
3884 3891 # Else we start with an indent.
3885 3892
3886 3893 if not activeaction:
3887 3894 raise error.Abort(_(b'indented line outside of block'))
3888 3895
3889 3896 indent = len(line) - len(line.lstrip())
3890 3897
3891 3898 # If this line is indented more than the last line, concatenate it.
3892 3899 if indent > lastindent and blocklines:
3893 3900 blocklines[-1] += line.lstrip()
3894 3901 else:
3895 3902 blocklines.append(line)
3896 3903 lastindent = indent
3897 3904
3898 3905 # Flush last block.
3899 3906 if activeaction:
3900 3907 yield activeaction, blocklines
3901 3908
3902 3909
3903 3910 @command(
3904 3911 b'debugwireproto',
3905 3912 [
3906 3913 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3907 3914 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3908 3915 (
3909 3916 b'',
3910 3917 b'noreadstderr',
3911 3918 False,
3912 3919 _(b'do not read from stderr of the remote'),
3913 3920 ),
3914 3921 (
3915 3922 b'',
3916 3923 b'nologhandshake',
3917 3924 False,
3918 3925 _(b'do not log I/O related to the peer handshake'),
3919 3926 ),
3920 3927 ]
3921 3928 + cmdutil.remoteopts,
3922 3929 _(b'[PATH]'),
3923 3930 optionalrepo=True,
3924 3931 )
3925 3932 def debugwireproto(ui, repo, path=None, **opts):
3926 3933 """send wire protocol commands to a server
3927 3934
3928 3935 This command can be used to issue wire protocol commands to remote
3929 3936 peers and to debug the raw data being exchanged.
3930 3937
3931 3938 ``--localssh`` will start an SSH server against the current repository
3932 3939 and connect to that. By default, the connection will perform a handshake
3933 3940 and establish an appropriate peer instance.
3934 3941
3935 3942 ``--peer`` can be used to bypass the handshake protocol and construct a
3936 3943 peer instance using the specified class type. Valid values are ``raw``,
3937 3944 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3938 3945 raw data payloads and don't support higher-level command actions.
3939 3946
3940 3947 ``--noreadstderr`` can be used to disable automatic reading from stderr
3941 3948 of the peer (for SSH connections only). Disabling automatic reading of
3942 3949 stderr is useful for making output more deterministic.
3943 3950
3944 3951 Commands are issued via a mini language which is specified via stdin.
3945 3952 The language consists of individual actions to perform. An action is
3946 3953 defined by a block. A block is defined as a line with no leading
3947 3954 space followed by 0 or more lines with leading space. Blocks are
3948 3955 effectively a high-level command with additional metadata.
3949 3956
3950 3957 Lines beginning with ``#`` are ignored.
3951 3958
3952 3959 The following sections denote available actions.
3953 3960
3954 3961 raw
3955 3962 ---
3956 3963
3957 3964 Send raw data to the server.
3958 3965
3959 3966 The block payload contains the raw data to send as one atomic send
3960 3967 operation. The data may not actually be delivered in a single system
3961 3968 call: it depends on the abilities of the transport being used.
3962 3969
3963 3970 Each line in the block is de-indented and concatenated. Then, that
3964 3971 value is evaluated as a Python b'' literal. This allows the use of
3965 3972 backslash escaping, etc.
3966 3973
3967 3974 raw+
3968 3975 ----
3969 3976
3970 3977 Behaves like ``raw`` except flushes output afterwards.
3971 3978
3972 3979 command <X>
3973 3980 -----------
3974 3981
3975 3982 Send a request to run a named command, whose name follows the ``command``
3976 3983 string.
3977 3984
3978 3985 Arguments to the command are defined as lines in this block. The format of
3979 3986 each line is ``<key> <value>``. e.g.::
3980 3987
3981 3988 command listkeys
3982 3989 namespace bookmarks
3983 3990
3984 3991 If the value begins with ``eval:``, it will be interpreted as a Python
3985 3992 literal expression. Otherwise values are interpreted as Python b'' literals.
3986 3993 This allows sending complex types and encoding special byte sequences via
3987 3994 backslash escaping.
3988 3995
3989 3996 The following arguments have special meaning:
3990 3997
3991 3998 ``PUSHFILE``
3992 3999 When defined, the *push* mechanism of the peer will be used instead
3993 4000 of the static request-response mechanism and the content of the
3994 4001 file specified in the value of this argument will be sent as the
3995 4002 command payload.
3996 4003
3997 4004 This can be used to submit a local bundle file to the remote.
3998 4005
3999 4006 batchbegin
4000 4007 ----------
4001 4008
4002 4009 Instruct the peer to begin a batched send.
4003 4010
4004 4011 All ``command`` blocks are queued for execution until the next
4005 4012 ``batchsubmit`` block.
4006 4013
4007 4014 batchsubmit
4008 4015 -----------
4009 4016
4010 4017 Submit previously queued ``command`` blocks as a batch request.
4011 4018
4012 4019 This action MUST be paired with a ``batchbegin`` action.
4013 4020
4014 4021 httprequest <method> <path>
4015 4022 ---------------------------
4016 4023
4017 4024 (HTTP peer only)
4018 4025
4019 4026 Send an HTTP request to the peer.
4020 4027
4021 4028 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4022 4029
4023 4030 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4024 4031 headers to add to the request. e.g. ``Accept: foo``.
4025 4032
4026 4033 The following arguments are special:
4027 4034
4028 4035 ``BODYFILE``
4029 4036 The content of the file defined as the value to this argument will be
4030 4037 transferred verbatim as the HTTP request body.
4031 4038
4032 4039 ``frame <type> <flags> <payload>``
4033 4040 Send a unified protocol frame as part of the request body.
4034 4041
4035 4042 All frames will be collected and sent as the body to the HTTP
4036 4043 request.
4037 4044
4038 4045 close
4039 4046 -----
4040 4047
4041 4048 Close the connection to the server.
4042 4049
4043 4050 flush
4044 4051 -----
4045 4052
4046 4053 Flush data written to the server.
4047 4054
4048 4055 readavailable
4049 4056 -------------
4050 4057
4051 4058 Close the write end of the connection and read all available data from
4052 4059 the server.
4053 4060
4054 4061 If the connection to the server encompasses multiple pipes, we poll both
4055 4062 pipes and read available data.
4056 4063
4057 4064 readline
4058 4065 --------
4059 4066
4060 4067 Read a line of output from the server. If there are multiple output
4061 4068 pipes, reads only the main pipe.
4062 4069
4063 4070 ereadline
4064 4071 ---------
4065 4072
4066 4073 Like ``readline``, but read from the stderr pipe, if available.
4067 4074
4068 4075 read <X>
4069 4076 --------
4070 4077
4071 4078 ``read()`` N bytes from the server's main output pipe.
4072 4079
4073 4080 eread <X>
4074 4081 ---------
4075 4082
4076 4083 ``read()`` N bytes from the server's stderr pipe, if available.
4077 4084
4078 4085 Specifying Unified Frame-Based Protocol Frames
4079 4086 ----------------------------------------------
4080 4087
4081 4088 It is possible to emit a *Unified Frame-Based Protocol* by using special
4082 4089 syntax.
4083 4090
4084 4091 A frame is composed as a type, flags, and payload. These can be parsed
4085 4092 from a string of the form:
4086 4093
4087 4094 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4088 4095
4089 4096 ``request-id`` and ``stream-id`` are integers defining the request and
4090 4097 stream identifiers.
4091 4098
4092 4099 ``type`` can be an integer value for the frame type or the string name
4093 4100 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4094 4101 ``command-name``.
4095 4102
4096 4103 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4097 4104 components. Each component (and there can be just one) can be an integer
4098 4105 or a flag name for stream flags or frame flags, respectively. Values are
4099 4106 resolved to integers and then bitwise OR'd together.
4100 4107
4101 4108 ``payload`` represents the raw frame payload. If it begins with
4102 4109 ``cbor:``, the following string is evaluated as Python code and the
4103 4110 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4104 4111 as a Python byte string literal.
4105 4112 """
4106 4113 opts = pycompat.byteskwargs(opts)
4107 4114
4108 4115 if opts[b'localssh'] and not repo:
4109 4116 raise error.Abort(_(b'--localssh requires a repository'))
4110 4117
4111 4118 if opts[b'peer'] and opts[b'peer'] not in (
4112 4119 b'raw',
4113 4120 b'http2',
4114 4121 b'ssh1',
4115 4122 b'ssh2',
4116 4123 ):
4117 4124 raise error.Abort(
4118 4125 _(b'invalid value for --peer'),
4119 4126 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4120 4127 )
4121 4128
4122 4129 if path and opts[b'localssh']:
4123 4130 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4124 4131
4125 4132 if ui.interactive():
4126 4133 ui.write(_(b'(waiting for commands on stdin)\n'))
4127 4134
4128 4135 blocks = list(_parsewirelangblocks(ui.fin))
4129 4136
4130 4137 proc = None
4131 4138 stdin = None
4132 4139 stdout = None
4133 4140 stderr = None
4134 4141 opener = None
4135 4142
4136 4143 if opts[b'localssh']:
4137 4144 # We start the SSH server in its own process so there is process
4138 4145 # separation. This prevents a whole class of potential bugs around
4139 4146 # shared state from interfering with server operation.
4140 4147 args = procutil.hgcmd() + [
4141 4148 b'-R',
4142 4149 repo.root,
4143 4150 b'debugserve',
4144 4151 b'--sshstdio',
4145 4152 ]
4146 4153 proc = subprocess.Popen(
4147 4154 pycompat.rapply(procutil.tonativestr, args),
4148 4155 stdin=subprocess.PIPE,
4149 4156 stdout=subprocess.PIPE,
4150 4157 stderr=subprocess.PIPE,
4151 4158 bufsize=0,
4152 4159 )
4153 4160
4154 4161 stdin = proc.stdin
4155 4162 stdout = proc.stdout
4156 4163 stderr = proc.stderr
4157 4164
4158 4165 # We turn the pipes into observers so we can log I/O.
4159 4166 if ui.verbose or opts[b'peer'] == b'raw':
4160 4167 stdin = util.makeloggingfileobject(
4161 4168 ui, proc.stdin, b'i', logdata=True
4162 4169 )
4163 4170 stdout = util.makeloggingfileobject(
4164 4171 ui, proc.stdout, b'o', logdata=True
4165 4172 )
4166 4173 stderr = util.makeloggingfileobject(
4167 4174 ui, proc.stderr, b'e', logdata=True
4168 4175 )
4169 4176
4170 4177 # --localssh also implies the peer connection settings.
4171 4178
4172 4179 url = b'ssh://localserver'
4173 4180 autoreadstderr = not opts[b'noreadstderr']
4174 4181
4175 4182 if opts[b'peer'] == b'ssh1':
4176 4183 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4177 4184 peer = sshpeer.sshv1peer(
4178 4185 ui,
4179 4186 url,
4180 4187 proc,
4181 4188 stdin,
4182 4189 stdout,
4183 4190 stderr,
4184 4191 None,
4185 4192 autoreadstderr=autoreadstderr,
4186 4193 )
4187 4194 elif opts[b'peer'] == b'ssh2':
4188 4195 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4189 4196 peer = sshpeer.sshv2peer(
4190 4197 ui,
4191 4198 url,
4192 4199 proc,
4193 4200 stdin,
4194 4201 stdout,
4195 4202 stderr,
4196 4203 None,
4197 4204 autoreadstderr=autoreadstderr,
4198 4205 )
4199 4206 elif opts[b'peer'] == b'raw':
4200 4207 ui.write(_(b'using raw connection to peer\n'))
4201 4208 peer = None
4202 4209 else:
4203 4210 ui.write(_(b'creating ssh peer from handshake results\n'))
4204 4211 peer = sshpeer.makepeer(
4205 4212 ui,
4206 4213 url,
4207 4214 proc,
4208 4215 stdin,
4209 4216 stdout,
4210 4217 stderr,
4211 4218 autoreadstderr=autoreadstderr,
4212 4219 )
4213 4220
4214 4221 elif path:
4215 4222 # We bypass hg.peer() so we can proxy the sockets.
4216 4223 # TODO consider not doing this because we skip
4217 4224 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4218 4225 u = util.url(path)
4219 4226 if u.scheme != b'http':
4220 4227 raise error.Abort(_(b'only http:// paths are currently supported'))
4221 4228
4222 4229 url, authinfo = u.authinfo()
4223 4230 openerargs = {
4224 4231 'useragent': b'Mercurial debugwireproto',
4225 4232 }
4226 4233
4227 4234 # Turn pipes/sockets into observers so we can log I/O.
4228 4235 if ui.verbose:
4229 4236 openerargs.update(
4230 4237 {
4231 4238 'loggingfh': ui,
4232 4239 'loggingname': b's',
4233 4240 'loggingopts': {'logdata': True, 'logdataapis': False,},
4234 4241 }
4235 4242 )
4236 4243
4237 4244 if ui.debugflag:
4238 4245 openerargs['loggingopts']['logdataapis'] = True
4239 4246
4240 4247 # Don't send default headers when in raw mode. This allows us to
4241 4248 # bypass most of the behavior of our URL handling code so we can
4242 4249 # have near complete control over what's sent on the wire.
4243 4250 if opts[b'peer'] == b'raw':
4244 4251 openerargs['sendaccept'] = False
4245 4252
4246 4253 opener = urlmod.opener(ui, authinfo, **openerargs)
4247 4254
4248 4255 if opts[b'peer'] == b'http2':
4249 4256 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4250 4257 # We go through makepeer() because we need an API descriptor for
4251 4258 # the peer instance to be useful.
4252 4259 with ui.configoverride(
4253 4260 {(b'experimental', b'httppeer.advertise-v2'): True}
4254 4261 ):
4255 4262 if opts[b'nologhandshake']:
4256 4263 ui.pushbuffer()
4257 4264
4258 4265 peer = httppeer.makepeer(ui, path, opener=opener)
4259 4266
4260 4267 if opts[b'nologhandshake']:
4261 4268 ui.popbuffer()
4262 4269
4263 4270 if not isinstance(peer, httppeer.httpv2peer):
4264 4271 raise error.Abort(
4265 4272 _(
4266 4273 b'could not instantiate HTTP peer for '
4267 4274 b'wire protocol version 2'
4268 4275 ),
4269 4276 hint=_(
4270 4277 b'the server may not have the feature '
4271 4278 b'enabled or is not allowing this '
4272 4279 b'client version'
4273 4280 ),
4274 4281 )
4275 4282
4276 4283 elif opts[b'peer'] == b'raw':
4277 4284 ui.write(_(b'using raw connection to peer\n'))
4278 4285 peer = None
4279 4286 elif opts[b'peer']:
4280 4287 raise error.Abort(
4281 4288 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4282 4289 )
4283 4290 else:
4284 4291 peer = httppeer.makepeer(ui, path, opener=opener)
4285 4292
4286 4293 # We /could/ populate stdin/stdout with sock.makefile()...
4287 4294 else:
4288 4295 raise error.Abort(_(b'unsupported connection configuration'))
4289 4296
4290 4297 batchedcommands = None
4291 4298
4292 4299 # Now perform actions based on the parsed wire language instructions.
4293 4300 for action, lines in blocks:
4294 4301 if action in (b'raw', b'raw+'):
4295 4302 if not stdin:
4296 4303 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4297 4304
4298 4305 # Concatenate the data together.
4299 4306 data = b''.join(l.lstrip() for l in lines)
4300 4307 data = stringutil.unescapestr(data)
4301 4308 stdin.write(data)
4302 4309
4303 4310 if action == b'raw+':
4304 4311 stdin.flush()
4305 4312 elif action == b'flush':
4306 4313 if not stdin:
4307 4314 raise error.Abort(_(b'cannot call flush on this peer'))
4308 4315 stdin.flush()
4309 4316 elif action.startswith(b'command'):
4310 4317 if not peer:
4311 4318 raise error.Abort(
4312 4319 _(
4313 4320 b'cannot send commands unless peer instance '
4314 4321 b'is available'
4315 4322 )
4316 4323 )
4317 4324
4318 4325 command = action.split(b' ', 1)[1]
4319 4326
4320 4327 args = {}
4321 4328 for line in lines:
4322 4329 # We need to allow empty values.
4323 4330 fields = line.lstrip().split(b' ', 1)
4324 4331 if len(fields) == 1:
4325 4332 key = fields[0]
4326 4333 value = b''
4327 4334 else:
4328 4335 key, value = fields
4329 4336
4330 4337 if value.startswith(b'eval:'):
4331 4338 value = stringutil.evalpythonliteral(value[5:])
4332 4339 else:
4333 4340 value = stringutil.unescapestr(value)
4334 4341
4335 4342 args[key] = value
4336 4343
4337 4344 if batchedcommands is not None:
4338 4345 batchedcommands.append((command, args))
4339 4346 continue
4340 4347
4341 4348 ui.status(_(b'sending %s command\n') % command)
4342 4349
4343 4350 if b'PUSHFILE' in args:
4344 4351 with open(args[b'PUSHFILE'], 'rb') as fh:
4345 4352 del args[b'PUSHFILE']
4346 4353 res, output = peer._callpush(
4347 4354 command, fh, **pycompat.strkwargs(args)
4348 4355 )
4349 4356 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4350 4357 ui.status(
4351 4358 _(b'remote output: %s\n') % stringutil.escapestr(output)
4352 4359 )
4353 4360 else:
4354 4361 with peer.commandexecutor() as e:
4355 4362 res = e.callcommand(command, args).result()
4356 4363
4357 4364 if isinstance(res, wireprotov2peer.commandresponse):
4358 4365 val = res.objects()
4359 4366 ui.status(
4360 4367 _(b'response: %s\n')
4361 4368 % stringutil.pprint(val, bprefix=True, indent=2)
4362 4369 )
4363 4370 else:
4364 4371 ui.status(
4365 4372 _(b'response: %s\n')
4366 4373 % stringutil.pprint(res, bprefix=True, indent=2)
4367 4374 )
4368 4375
4369 4376 elif action == b'batchbegin':
4370 4377 if batchedcommands is not None:
4371 4378 raise error.Abort(_(b'nested batchbegin not allowed'))
4372 4379
4373 4380 batchedcommands = []
4374 4381 elif action == b'batchsubmit':
4375 4382 # There is a batching API we could go through. But it would be
4376 4383 # difficult to normalize requests into function calls. It is easier
4377 4384 # to bypass this layer and normalize to commands + args.
4378 4385 ui.status(
4379 4386 _(b'sending batch with %d sub-commands\n')
4380 4387 % len(batchedcommands)
4381 4388 )
4382 4389 assert peer is not None
4383 4390 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4384 4391 ui.status(
4385 4392 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4386 4393 )
4387 4394
4388 4395 batchedcommands = None
4389 4396
4390 4397 elif action.startswith(b'httprequest '):
4391 4398 if not opener:
4392 4399 raise error.Abort(
4393 4400 _(b'cannot use httprequest without an HTTP peer')
4394 4401 )
4395 4402
4396 4403 request = action.split(b' ', 2)
4397 4404 if len(request) != 3:
4398 4405 raise error.Abort(
4399 4406 _(
4400 4407 b'invalid httprequest: expected format is '
4401 4408 b'"httprequest <method> <path>'
4402 4409 )
4403 4410 )
4404 4411
4405 4412 method, httppath = request[1:]
4406 4413 headers = {}
4407 4414 body = None
4408 4415 frames = []
4409 4416 for line in lines:
4410 4417 line = line.lstrip()
4411 4418 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4412 4419 if m:
4413 4420 # Headers need to use native strings.
4414 4421 key = pycompat.strurl(m.group(1))
4415 4422 value = pycompat.strurl(m.group(2))
4416 4423 headers[key] = value
4417 4424 continue
4418 4425
4419 4426 if line.startswith(b'BODYFILE '):
4420 4427 with open(line.split(b' ', 1)[1], b'rb') as fh:
4421 4428 body = fh.read()
4422 4429 elif line.startswith(b'frame '):
4423 4430 frame = wireprotoframing.makeframefromhumanstring(
4424 4431 line[len(b'frame ') :]
4425 4432 )
4426 4433
4427 4434 frames.append(frame)
4428 4435 else:
4429 4436 raise error.Abort(
4430 4437 _(b'unknown argument to httprequest: %s') % line
4431 4438 )
4432 4439
4433 4440 url = path + httppath
4434 4441
4435 4442 if frames:
4436 4443 body = b''.join(bytes(f) for f in frames)
4437 4444
4438 4445 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4439 4446
4440 4447 # urllib.Request insists on using has_data() as a proxy for
4441 4448 # determining the request method. Override that to use our
4442 4449 # explicitly requested method.
4443 4450 req.get_method = lambda: pycompat.sysstr(method)
4444 4451
4445 4452 try:
4446 4453 res = opener.open(req)
4447 4454 body = res.read()
4448 4455 except util.urlerr.urlerror as e:
4449 4456 # read() method must be called, but only exists in Python 2
4450 4457 getattr(e, 'read', lambda: None)()
4451 4458 continue
4452 4459
4453 4460 ct = res.headers.get('Content-Type')
4454 4461 if ct == 'application/mercurial-cbor':
4455 4462 ui.write(
4456 4463 _(b'cbor> %s\n')
4457 4464 % stringutil.pprint(
4458 4465 cborutil.decodeall(body), bprefix=True, indent=2
4459 4466 )
4460 4467 )
4461 4468
4462 4469 elif action == b'close':
4463 4470 assert peer is not None
4464 4471 peer.close()
4465 4472 elif action == b'readavailable':
4466 4473 if not stdout or not stderr:
4467 4474 raise error.Abort(
4468 4475 _(b'readavailable not available on this peer')
4469 4476 )
4470 4477
4471 4478 stdin.close()
4472 4479 stdout.read()
4473 4480 stderr.read()
4474 4481
4475 4482 elif action == b'readline':
4476 4483 if not stdout:
4477 4484 raise error.Abort(_(b'readline not available on this peer'))
4478 4485 stdout.readline()
4479 4486 elif action == b'ereadline':
4480 4487 if not stderr:
4481 4488 raise error.Abort(_(b'ereadline not available on this peer'))
4482 4489 stderr.readline()
4483 4490 elif action.startswith(b'read '):
4484 4491 count = int(action.split(b' ', 1)[1])
4485 4492 if not stdout:
4486 4493 raise error.Abort(_(b'read not available on this peer'))
4487 4494 stdout.read(count)
4488 4495 elif action.startswith(b'eread '):
4489 4496 count = int(action.split(b' ', 1)[1])
4490 4497 if not stderr:
4491 4498 raise error.Abort(_(b'eread not available on this peer'))
4492 4499 stderr.read(count)
4493 4500 else:
4494 4501 raise error.Abort(_(b'unknown action: %s') % action)
4495 4502
4496 4503 if batchedcommands is not None:
4497 4504 raise error.Abort(_(b'unclosed "batchbegin" request'))
4498 4505
4499 4506 if peer:
4500 4507 peer.close()
4501 4508
4502 4509 if proc:
4503 4510 proc.kill()
@@ -1,255 +1,261 b''
1 1 hg debuginstall
2 2 $ hg debuginstall
3 3 checking encoding (ascii)...
4 4 checking Python executable (*) (glob)
5 5 checking Python implementation (*) (glob)
6 6 checking Python version (2.*) (glob) (no-py3 !)
7 7 checking Python version (3.*) (glob) (py3 !)
8 8 checking Python lib (.*[Ll]ib.*)... (re)
9 9 checking Python security support (*) (glob)
10 10 TLS 1.2 not supported by Python install; network connections lack modern security (?)
11 11 SNI not supported by Python install; may have connectivity issues with some servers (?)
12 12 checking Rust extensions \((installed|missing)\) (re)
13 13 checking Mercurial version (*) (glob)
14 14 checking Mercurial custom build (*) (glob)
15 15 checking module policy (*) (glob)
16 16 checking installed modules (*mercurial)... (glob)
17 17 checking registered compression engines (*zlib*) (glob)
18 18 checking available compression engines (*zlib*) (glob)
19 19 checking available compression engines for wire protocol (*zlib*) (glob)
20 20 checking "re2" regexp engine \((available|missing)\) (re)
21 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
21 22 checking templates (*mercurial?templates)... (glob)
22 23 checking default template (*mercurial?templates?map-cmdline.default) (glob)
23 24 checking commit editor... (*) (glob)
24 25 checking username (test)
25 26 no problems detected
26 27
27 28 hg debuginstall JSON
28 29 $ hg debuginstall -Tjson | sed 's|\\\\|\\|g'
29 30 [
30 31 {
31 32 "compengines": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
32 33 "compenginesavail": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
33 34 "compenginesserver": [*"zlib"*], (glob)
34 35 "defaulttemplate": "*mercurial?templates?map-cmdline.default", (glob)
35 36 "defaulttemplateerror": null,
36 37 "defaulttemplatenotfound": "default",
37 38 "editor": "*", (glob)
38 39 "editornotfound": false,
39 40 "encoding": "ascii",
40 41 "encodingerror": null,
41 42 "extensionserror": null, (no-pure !)
42 43 "hgmodulepolicy": "*", (glob)
43 44 "hgmodules": "*mercurial", (glob)
44 45 "hgver": "*", (glob)
45 46 "hgverextra": "*", (glob)
46 47 "problems": 0,
47 48 "pythonexe": "*", (glob)
48 49 "pythonimplementation": "*", (glob)
49 50 "pythonlib": "*", (glob)
50 51 "pythonsecurity": [*], (glob)
51 52 "pythonver": "*.*.*", (glob)
52 53 "re2": (true|false), (re)
53 54 "templatedirs": "*mercurial?templates", (glob)
54 55 "username": "test",
55 56 "usernameerror": null,
56 57 "vinotfound": false
57 58 }
58 59 ]
59 60
60 61 hg debuginstall with no username
61 62 $ HGUSER= hg debuginstall
62 63 checking encoding (ascii)...
63 64 checking Python executable (*) (glob)
64 65 checking Python implementation (*) (glob)
65 66 checking Python version (2.*) (glob) (no-py3 !)
66 67 checking Python version (3.*) (glob) (py3 !)
67 68 checking Python lib (.*[Ll]ib.*)... (re)
68 69 checking Python security support (*) (glob)
69 70 TLS 1.2 not supported by Python install; network connections lack modern security (?)
70 71 SNI not supported by Python install; may have connectivity issues with some servers (?)
71 72 checking Rust extensions \((installed|missing)\) (re)
72 73 checking Mercurial version (*) (glob)
73 74 checking Mercurial custom build (*) (glob)
74 75 checking module policy (*) (glob)
75 76 checking installed modules (*mercurial)... (glob)
76 77 checking registered compression engines (*zlib*) (glob)
77 78 checking available compression engines (*zlib*) (glob)
78 79 checking available compression engines for wire protocol (*zlib*) (glob)
79 80 checking "re2" regexp engine \((available|missing)\) (re)
81 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
80 82 checking templates (*mercurial?templates)... (glob)
81 83 checking default template (*mercurial?templates?map-cmdline.default) (glob)
82 84 checking commit editor... (*) (glob)
83 85 checking username...
84 86 no username supplied
85 87 (specify a username in your configuration file)
86 88 1 problems detected, please check your install!
87 89 [1]
88 90
89 91 hg debuginstall with invalid encoding
90 92 $ HGENCODING=invalidenc hg debuginstall | grep encoding
91 93 checking encoding (invalidenc)...
92 94 unknown encoding: invalidenc
93 95
94 96 exception message in JSON
95 97
96 98 $ HGENCODING=invalidenc HGUSER= hg debuginstall -Tjson | grep error
97 99 "defaulttemplateerror": null,
98 100 "encodingerror": "unknown encoding: invalidenc",
99 101 "extensionserror": null, (no-pure !)
100 102 "usernameerror": "no username supplied",
101 103
102 104 path variables are expanded (~ is the same as $TESTTMP)
103 105 $ mkdir tools
104 106 $ touch tools/testeditor.exe
105 107 #if execbit
106 108 $ chmod 755 tools/testeditor.exe
107 109 #endif
108 110 $ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
109 111 checking encoding (ascii)...
110 112 checking Python executable (*) (glob)
111 113 checking Python implementation (*) (glob)
112 114 checking Python version (2.*) (glob) (no-py3 !)
113 115 checking Python version (3.*) (glob) (py3 !)
114 116 checking Python lib (.*[Ll]ib.*)... (re)
115 117 checking Python security support (*) (glob)
116 118 TLS 1.2 not supported by Python install; network connections lack modern security (?)
117 119 SNI not supported by Python install; may have connectivity issues with some servers (?)
118 120 checking Rust extensions \((installed|missing)\) (re)
119 121 checking Mercurial version (*) (glob)
120 122 checking Mercurial custom build (*) (glob)
121 123 checking module policy (*) (glob)
122 124 checking installed modules (*mercurial)... (glob)
123 125 checking registered compression engines (*zlib*) (glob)
124 126 checking available compression engines (*zlib*) (glob)
125 127 checking available compression engines for wire protocol (*zlib*) (glob)
126 128 checking "re2" regexp engine \((available|missing)\) (re)
129 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
127 130 checking templates (*mercurial?templates)... (glob)
128 131 checking default template (*mercurial?templates?map-cmdline.default) (glob)
129 132 checking commit editor... ($TESTTMP/tools/testeditor.exe)
130 133 checking username (test)
131 134 no problems detected
132 135
133 137 print out the binary post-shlexsplit in the error message when the commit editor is
134 138 not found (this intentionally uses backslashes to mimic a Windows use case).
135 138 $ HGEDITOR="c:\foo\bar\baz.exe -y -z" hg debuginstall
136 139 checking encoding (ascii)...
137 140 checking Python executable (*) (glob)
138 141 checking Python implementation (*) (glob)
139 142 checking Python version (2.*) (glob) (no-py3 !)
140 143 checking Python version (3.*) (glob) (py3 !)
141 144 checking Python lib (.*[Ll]ib.*)... (re)
142 145 checking Python security support (*) (glob)
143 146 TLS 1.2 not supported by Python install; network connections lack modern security (?)
144 147 SNI not supported by Python install; may have connectivity issues with some servers (?)
145 148 checking Rust extensions \((installed|missing)\) (re)
146 149 checking Mercurial version (*) (glob)
147 150 checking Mercurial custom build (*) (glob)
148 151 checking module policy (*) (glob)
149 152 checking installed modules (*mercurial)... (glob)
150 153 checking registered compression engines (*zlib*) (glob)
151 154 checking available compression engines (*zlib*) (glob)
152 155 checking available compression engines for wire protocol (*zlib*) (glob)
153 156 checking "re2" regexp engine \((available|missing)\) (re)
157 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
154 158 checking templates (*mercurial?templates)... (glob)
155 159 checking default template (*mercurial?templates?map-cmdline.default) (glob)
156 160 checking commit editor... (c:\foo\bar\baz.exe) (windows !)
157 161 Can't find editor 'c:\foo\bar\baz.exe' in PATH (windows !)
158 162 checking commit editor... (c:foobarbaz.exe) (no-windows !)
159 163 Can't find editor 'c:foobarbaz.exe' in PATH (no-windows !)
160 164 (specify a commit editor in your configuration file)
161 165 checking username (test)
162 166 1 problems detected, please check your install!
163 167 [1]
164 168
165 169 debuginstall extension support
166 170 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false | grep atchman
167 171 fsmonitor checking for watchman binary... (false)
168 172 watchman binary missing or broken: warning: Watchman unavailable: watchman exited with code 1
169 173 Verify the json works too:
170 174 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false -Tjson | grep atchman
171 175 "fsmonitor-watchman": "false",
172 176 "fsmonitor-watchman-error": "warning: Watchman unavailable: watchman exited with code 1",
173 177
174 178 Verify that Mercurial is installable with pip. Note that this MUST be
175 179 the last test in this file, because we do some nasty things to the
176 180 shell environment in order to make the virtualenv work reliably.
177 181
178 182 On Python 3, we use the venv module, which is part of the standard library.
179 183 But some Linux distros strip out this module's functionality involving pip,
180 184 so we have to look for the ensurepip module, which these distros strip out
181 185 completely.
182 186 On Python 2, we use the 3rd party virtualenv module, if available.
183 187
184 188 $ cd $TESTTMP
185 189 $ unset PYTHONPATH
186 190
187 191 #if py3 ensurepip
188 192 $ "$PYTHON" -m venv installenv >> pip.log
189 193
190 194 Note: we use this weird path to run pip and hg to avoid platform differences,
191 195 since it's bin on most platforms but Scripts on Windows.
192 196 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
193 197 Failed building wheel for mercurial (?)
194 198 $ ./installenv/*/hg debuginstall || cat pip.log
195 199 checking encoding (ascii)...
196 200 checking Python executable (*) (glob)
197 201 checking Python implementation (*) (glob)
198 202 checking Python version (3.*) (glob)
199 203 checking Python lib (*)... (glob)
200 204 checking Python security support (*) (glob)
201 205 checking Rust extensions \((installed|missing)\) (re)
202 206 checking Mercurial version (*) (glob)
203 207 checking Mercurial custom build (*) (glob)
204 208 checking module policy (*) (glob)
205 209 checking installed modules (*/mercurial)... (glob)
206 210 checking registered compression engines (*) (glob)
207 211 checking available compression engines (*) (glob)
208 212 checking available compression engines for wire protocol (*) (glob)
209 213 checking "re2" regexp engine \((available|missing)\) (re)
214 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
210 215 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
211 216 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
212 217 checking commit editor... (*) (glob)
213 218 checking username (test)
214 219 no problems detected
215 220 #endif
216 221
217 222 #if no-py3 virtualenv
218 223
219 224 Note: --no-site-packages is deprecated, but some places have an
220 225 ancient virtualenv from their linux distro or similar and it's not yet
221 226 the default for them.
222 227
223 228 $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log
224 229 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
225 230 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
226 231
227 232 Note: we use this weird path to run pip and hg to avoid platform differences,
228 233 since it's bin on most platforms but Scripts on Windows.
229 234 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
230 235 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
231 236 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
232 237 $ ./installenv/*/hg debuginstall || cat pip.log
233 238 checking encoding (ascii)...
234 239 checking Python executable (*) (glob)
235 240 checking Python implementation (*) (glob)
236 241 checking Python version (2.*) (glob)
237 242 checking Python lib (*)... (glob)
238 243 checking Python security support (*) (glob)
239 244 TLS 1.2 not supported by Python install; network connections lack modern security (?)
240 245 SNI not supported by Python install; may have connectivity issues with some servers (?)
241 246 checking Rust extensions \((installed|missing)\) (re)
242 247 checking Mercurial version (*) (glob)
243 248 checking Mercurial custom build (*) (glob)
244 249 checking module policy (*) (glob)
245 250 checking installed modules (*/mercurial)... (glob)
246 251 checking registered compression engines (*) (glob)
247 252 checking available compression engines (*) (glob)
248 253 checking available compression engines for wire protocol (*) (glob)
249 254 checking "re2" regexp engine \((available|missing)\) (re)
255 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
250 256 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
251 257 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
252 258 checking commit editor... (*) (glob)
253 259 checking username (test)
254 260 no problems detected
255 261 #endif