##// END OF EJS Templates
cleanup: rename some functions to avoid redefinitions
Manuel Jacob -
r50176:223d5508 default
parent child Browse files
Show More
@@ -1,5034 +1,5034 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 108 debug as revlog_debug,
109 109 deltas as deltautil,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
# Convenience alias used throughout this module when releasing locks.
release = lockmod.release

# Dispatch table for all debug* commands defined in this module, pre-seeded
# with the commands the strip module registers so they share one table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
120 120
121 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 2:
        # no explicit index file: fall back to the local repo's changelog
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    elif nargs == 3:
        # an explicit revlog index file was given on the command line
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
141 141
142 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # The vfs layer works with bytes paths (every other path literal in this
    # module is a bytes string), so pass the filename as bytes, not str.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
158 158
159 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle file, parse its header, then replay it onto the repo
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166 166
167 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    # refuse to build on top of existing history unless explicitly allowed
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass; used for progress
    # reporting and for sizing the mergeable file below)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # dag-local rev number of the last node committed
        atbranch = b'default'
        nodeids = []  # commit node id indexed by dag-local rev number
        id = 0
        progress.update(id)
        # second parse pass: actually create the commits/tags/branches
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one commit with parents `ps`
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: three-way merge the shared file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # append a marker to the line owned by this rev, so every
                    # rev modifies its own line of the shared file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry the second parent's nf* files over into the
                        # merge so they are not dropped
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node `id`
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch named branch for subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
353 353
354 354
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup `gen`

    With `all` set, every delta of the changelog, manifest and filelogs is
    listed (id, parents, cset, delta base, delta length); otherwise only the
    changelog node ids are printed.  `indent` prefixes each output line,
    which is used when nesting this inside bundle2 part listings.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print a section header then one line per delta in the stream
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # the changegroup stream is consumed in order: changelog, manifests,
        # then one section per filelog until an empty chunk header ({})
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
394 394
395 395
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # report the unknown format and bail out without a formatter
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(blob))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(blob)))
    fm = ui.formatter(b'debugobsolete', opts)
    for raw in sorted(markers):
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, obsutil.marker(None, raw))
    fm.end()
418 418
419 419
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(b'%s%s %s\n' % (prefix, hex(head), phasename))
428 428
429 429
def _quasirepr(thing):
    """repr() `thing` with deterministic (sorted) ordering for mappings"""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
436 436
437 437
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        if part.type == b'changegroup':
            # the unbundler is constructed even in quiet mode so the part
            # payload is consumed consistently
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
460 460
461 461
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, do not dump the contents
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
484 484
485 485
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query capabilities first, before emitting the header
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection
        peer.close()
505 505
506 506
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute the file-change information from the changectx
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the precomputed information from the changelog sidedata,
        # when this revision has a files block stored there
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # categorize each touched file; the categories are checked in a
            # fixed priority order, 'touched' being the catch-all
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # note the copy source and which parent it was copied from, if any
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
556 556
557 557
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    # each entry yielded by verify() is a format string plus its arguments
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
571 571
572 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # with --style list the configured styles, otherwise list raw colors
    if not opts.get('style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
585 585
586 586
def _debugdisplaycolor(ui):
    """print every known color name, each rendered in its own color"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui):
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[len(b'color.'):]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603 603
604 604
def _debugdisplaystyle(ui):
    """print each configured style label together with its effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in one column
    width = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            padding = b' ' * (max(0, width - len(name)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
618 618
619 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
641 641
642 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file given: emit that revlog's DAG
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield an 'n' (node) event per revision, plus an 'l' (label)
            # event for the revisions explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no index file: walk the changelog of the local repository
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each revision to the list of tags pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the branch of
                    # the walked revision changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
712 712
713 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        # with -c/-m/--dir the first positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729 729
730 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
749 749
750 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                        (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                        (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # gather per-revision delta statistics: parents, sizes, the kind of
        # delta used, the full delta chain and its cumulative size
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify the delta base; checks are ordered from most to
            # least specific
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without general delta, a revision is stored either in full or
            # as a delta whose base is tracked in the index entry
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains consecutively by their (unique) base revision
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read pattern: slice the chain into the
            # hunks a sparse read would fetch and accumulate their sizes
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
991 991
992 992
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # with a single positional argument it is the revision; with two, the
    # first is the revlog file and the second the revision
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # use a distinct local name so the module-level `revlog` import is not
    # shadowed inside this function
    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # replay the delta search verbosely, echoing each step to the ui
    deltacomputer = deltautil.deltacomputer(
        rl,
        write_debug=ui.write,
        debug_search=True,
    )

    node = rl.node(rev)
    p1r, p2r = rl.parentrevs(rev)
    p1 = rl.node(p1r)
    p2 = rl.node(p2r)
    btext = [rl.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = rl.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = rl._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1050 1050
1051 1051
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (the metadata file) instead
        # of the entries; only dirstate-v2 repositories have one.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # Unpack the fixed-layout tree metadata record stored in the docket.
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is the deprecated spelling; it overrides --dates when given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is requested
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # mtime == -1 means "unset"; --nodates hides the actual timestamp
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # render symlinks as 'lnk', everything else as an octal mode
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # finally, list the recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1139 1139
1140 1140
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        # the hash is the trailing bytes of the tree metadata record
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1155 1155
1156 1156
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situation.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # normal mode: talk to an actual remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository that only exposes the requested revisions
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick, applied to the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: keep incidental ui output out of the
        # formatted stream, record it as data instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1411 1411
1412 1412
_chunksize = 4 << 10  # 4 KiB: read/write granularity used by debugdownload
1414 1414
1415 1415
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is streamed to the ui (or to the file given with --output)
    in _chunksize pieces to bound memory usage.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # fix: close the response handle too; it was previously leaked
        fh.close()
        if output:
            dest.close()
1438 1438
1439 1439
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate where the extension was loaded from, when determinable
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: modules live inside the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # terse one-line-per-extension output unless -q/-v is in effect
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # show the most recent version the extension was tested with
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1501 1501
1502 1502
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset expression goes through these transformation stages in
    # order; --show-stage can dump the tree after any of them
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping intermediate trees as requested
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the files that the fileset selects, in sorted order
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1598 1598
1599 1599
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report cannot be combined with repairing (dry or otherwise)
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only ever affected revlogv1 repositories
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1672 1672
1673 1673
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # pad the name column to the longest variant name (or the header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # build a '%s:' template padded to align the value columns
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no but keeps strings
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # highlight mismatches between the repo, the config and the default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1744 1744
1745 1745
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability probe as b'yes'/b'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; failures (e.g. a
    # read-only location) leave the answer as '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1768 1768
1769 1769
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # translate the hex node ids into binary nodes for the wire call
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    known_types = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = known_types.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1816 1816
1817 1817
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: just dump the combined ignore matcher
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            # check the file itself first, then each of its parent
            # directories (a file inside an ignored directory is ignored)
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        # report the exact rule that matched
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1866 1866
1867 1867
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)

    # some storage objects wrap an underlying revlog; unwrap when present
    target_revlog = getattr(store, b'_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=target_revlog,
        full_node=ui.debugflag,
    )
1889 1889
1890 1890
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # emit one edge per (parent -> child) relation; the null second parent
    # is omitted
    for rev in r:
        node = r.node(rev)
        parents = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write(b"}\n")
1909 1909
1910 1910
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Use a native str attribute name, consistent with the other
    # safehasattr() calls in this module (previously b'stats', which only
    # worked through the pycompat getattr wrapper).
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    # stats() is only provided by the compiled index implementation.
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1920 1920
1921 1921
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks encoding, the Python interpreter, security/TLS support, compiled
    extension modules, compression engines, templates, the configured editor
    and username, then gives registered extensions a chance to run their own
    checks.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of detected problems; doubles as the return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no on-disk stdlib directory
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # Translate the template first, then apply % formatting, so the message
    # can be found in the translation catalogs (previously the formatting
    # happened inside _(), defeating catalog lookup).
    fm.plain(
        _(b"checking Rust extensions (%s)\n")
        % (b'missing' if rustext is None else b'installed'),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2220 2220
2221 2221
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Query all nodes in a single round-trip; the peer answers with a
    # boolean per node, in order.
    known = peer.known([bin(node_id) for node_id in ids])
    digits = [b"1" if flag else b"0" for flag in known]
    ui.write(b"%s\n" % b"".join(digits))
2235 2235
2236 2236
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)

    Thin alias that simply delegates to debugnamecomplete.
    """
    debugnamecomplete(ui, repo, *args)
2241 2241
2242 2242
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: remove the lock files unconditionally, without
    # checking whether the owning process is still alive (hence DANGEROUS).
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire the requested lock(s) non-blockingly,
    # then hold them until interrupted (or until the user confirms release
    # in interactive mode).
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always release whatever we managed to acquire
        release(*locks)

    # No modifying option given: report the current state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Returns 1 if the named lock is held, 0 if it is free, and prints
        # a human-readable status line either way.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so it was free; release immediately
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    # only mention the host when the lock is held remotely
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2366 2366
2367 2367
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache of the root manifest storage; not every
        # revlog implementation provides one, hence the AttributeError path.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # modifying the cache requires the working copy lock
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no action requested: display the current cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2441 2441
2442 2442
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        # report which on-disk merge-state format is in effect
        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default template: commits, then per-file records, then extras
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits": the local and other sides of the merge (when active)
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files": one record per file tracked by the merge state
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # content-merge record: local/ancestor/other details
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # path-conflict record
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # top-level "extras": entries for files not present in the merge state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2550 2550
2551 2551
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names are collected separately below because historically
    # only open branches were listed.
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # with no argument, complete against the empty prefix (i.e. everything)
    prefixes = args or [b'']
    completions = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2574 2574
2575 2575
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        changelog = repo.unfiltered().changelog
        index = changelog.index
        # compiled indexes can serialize their nodemap directly
        if util.safehasattr(index, "nodemap_data_all"):
            serialized = index.nodemap_data_all()
        else:
            serialized = nodemap.persistent_data(index)
        ui.write(serialized)
    elif opts['dump_disk']:
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, changelog.index, data)
    elif opts['metadata']:
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            # only the docket is needed to report the metadata
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2637 2637
2638 2638
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into binary, validating its length;
        # raises InputError on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers from the obsstore by index
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parent data can only come from locally-known changesets
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices must be computed against the full marker list
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2788 2788
2789 2789
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # print each recorded copy as "source -> destination"
    copies_map = ctx.p1copies()
    for dest, source in copies_map.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2802 2802
2803 2803
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Named debugp2copies (not debugp1copies) so it does not redefine the
    # p1 variant above; the diff render of this changeset had left both
    # def lines in place.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2816 2816
2817 2817
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for ``path``,
        # restricted to dirstate entries whose state byte is in
        # ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # paths outside the repository cannot complete to anything
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir) :]
        # dirstate stores '/'-separated paths; adapt on other platforms
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # map the -n/-a/-r flags to dirstate state characters
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # no flags given: accept every dirstate state
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2886 2886
2887 2887
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # resolve both endpoints of the comparison
    oldctx = scmutil.revsingle(repo, rev1)
    newctx = scmutil.revsingle(repo, rev2)
    # the matcher narrows the report to the requested file patterns
    matcher = scmutil.match(oldctx, pats, opts)
    copymap = copies.pathcopies(oldctx, newctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2901 2901
2902 2902
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; it only becomes visible with --debug.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # always release the peer connection, even if a probe failed
        peer.close()
2926 2926
2927 2927
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

    FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool wins over everything else: mirror it into ui.forcemerge
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # silence _picktool's own messages unless --debug was given
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3012 3012
3013 3013
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # update mode: push the new value and report the outcome
            key, old, new = keyinfo
            with peer.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            return not result
        else:
            # listing mode: dump every key/value pair in the namespace
            for listed_key, listed_value in sorted(
                peer.listkeys(namespace).items()
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (
                        stringutil.escapestr(listed_key),
                        stringutil.escapestr(listed_value),
                    )
                )
    finally:
        peer.close()
3049 3049
3050 3050
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvecs") of two revisions

    Prints both vectors, their depths, and the delta / hamming
    distance / relation between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: previously `rel` was left unbound when none
        # of the comparisons above held, making the final ui.write crash
        # with an UnboundLocalError instead of printing a result.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3077 3077
3078 3078
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files only present in the manifest ...
            manifestonly = manifestfiles - dirstatefiles
            # ... plus dirstate-only files that are not marked as added
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3126 3126
3127 3127
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data
    # restricts the scan to data (.d) files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3144 3144
3145 3145
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    bopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, bopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, bopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # filelog().renamed() reports the copy source recorded for this
        # file revision, if any
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if renamed:
            source, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, source, hex(srcnode))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3165 3165
3166 3166
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted for stable output
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3172 3172
3173 3173
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: print one raw index row per revision, then return.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # stored as a full text: treat the delta base as itself
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the head set incrementally: a revision's parents
            # stop being heads as soon as the revision itself is seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # cumulative raw size over compressed size so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: decode the header flags first.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each accumulator is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into a [min, max, total] accumulator, in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot: no delta parent
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot: delta against another snapshot
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify by what it is based on
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Turn the [min, max, total] totals into averages where meaningful.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
        for depth in snapsizedepth:
            snaptotal[depth] = snapsizedepth[depth][2]
            snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format string wide enough for `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # decimal + percentage format string wide enough for `max`
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total) for use with pcfmtstr.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render the one-byte chunk compression marker as a label.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3529 3529
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full-length hashes under --debug, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers for the selected format/verbosity combination.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision, matching the header chosen above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents when the lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3643 3643
3644 3644
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of (name, transform) pairs applied to the parsed tree in
    # order; each stage's output feeds the next.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages get printed: always, or only when they changed
    # the tree relative to the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff
        # the resulting revision lists; exit 1 when they disagree.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print the differences in a unified-diff-like style
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3776 3776
3777 3777
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive sinks for the
    # server I/O log.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Bind the process's stdio to an SSH protocol server and serve until EOF.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3826 3826
3827 3827
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    who deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions up front; REV2 defaults to the null revision.
    first = scmutil.revsingle(repo, rev1).node()
    second = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(first, second)
3855 3855
3856 3856
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument, so the first positional
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: this previously reported b'debugdata' (copy-pasted from
            # the debugdata command), showing the wrong command name in the
            # usage error.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    # The command label passed here is only used for error reporting; use
    # this command's own name.
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3883 3883
3884 3884
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated since Python 3.7 and removed in
    # Python 3.12; build an explicit SSLContext instead. Verification is
    # intentionally disabled (CERT_NONE, no hostname check, matching the
    # old cert_reqs=CERT_NONE/ca_certs=None arguments): we only need the
    # peer's raw certificate bytes to hand to the Windows chain builder.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes rather than a decoded dict.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only checks; second pass (build=True default) asks
        # Windows to fetch missing intermediates/roots via Windows Update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3956 3956
3957 3957
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # These two keys are consumed by getremotechanges() below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show one bundle's changesets, honoring the --newest-first,
        # --limit and --no-merges options inherited from logopts.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle may reference a parent revision no longer present
            # locally; warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly compute the changesets this bundle would add; skip
        # bundles that cannot be resolved against the local repo.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # Legacy (pre-bundle2) bundle format.
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Changeset recovered; stop scanning other backups.
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
4098 4098
4099 4099
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """print the substate (path, source, revision) of each subrepository"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4112 4112
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when the shell is actually launched.
    import code

    local_ns = {
        'ui': ui,
        'repo': repo,
    }
    code.interact(local=local_ns)
4128 4128
4129 4129
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # The ctx2str/node2str aliases (leftovers from py2/py3 dual support,
    # where they differed per version) were removed: bytes() and short()
    # are now called directly.
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b'    ')
                ui.write(short(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(short(node))
            ui.write(b'\n')
4184 4184
4185 4185
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what is already cached.
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            display = b'missing'
        elif not fnode:
            # Cached, but not a usable node value.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4204 4204
4205 4205
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions. An empty key and the reserved key
    # 'ui' are both rejected.
    props = {}
    for definition in opts['define']:
        try:
            key, value = definition.split(b'=', 1)
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )
        key = key.strip()
        value = value.strip()
        if not key or key == b'ui':
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )
        props[key] = value

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def showsymbols(symbols):
        # With --verbose, list the keywords and functions the template uses.
        kwds, funcs = symbols
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t.symbolsuseddefault())
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t.symbolsuseddefault())
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4269 4269
4270 4270
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may return None; substitute a marker so the response
    # line is always printed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4285 4285
4286 4286
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4299 4299
4300 4300
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take the working-copy lock before the store lock (the usual lock
    # ordering), then ask the repo to rebuild every cache it knows about.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4306 4306
4307 4307
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before handing off.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui,
        repo,
        run=run,
        optimize=optimizations,
        backup=backup,
        **opts
    )
4357 4357
4358 4358
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Honor ui.slash by normalizing path separators for display only.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths so the output lines up.
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(p) for p in paths),
        max(len(repo.pathto(p)) for p in paths),
    )
    for p in paths:
        line = fmt % (
            p,
            display(repo.pathto(p)),
            b'exact' if matcher.exact(p) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4385 4385
4386 4386
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            formatted = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(formatted) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4404 4404
4405 4405
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the standard remote options; anything left with a value is
        # forwarded as a wire argument.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4436 4436
4437 4437
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    ``fh`` yields bytes lines. An action begins on an unindented line;
    the indented lines that follow form its payload. A payload line that
    is indented deeper than its predecessor is a continuation and is
    concatenated onto it (leading whitespace stripped). Blank lines and
    lines starting with ``#`` are ignored.
    """
    action = None
    payload = []
    previndent = 0

    for rawline in fh:
        line = rawline.rstrip()
        # Skip blank lines and comments.
        if not line or line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # Unindented: a new action starts, so flush the previous one.
            if action:
                yield action, payload
            action = line
            payload = []
            previndent = 0
            continue

        # Indented payload line.
        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())
        if indent > previndent and payload:
            # Deeper indent: continuation of the previous payload line.
            payload[-1] += line.lstrip()
        else:
            payload.append(line)
        previndent = indent

    # Flush the final block.
    if action:
        yield action, payload
4478 4478
4479 4479
4480 4480 @command(
4481 4481 b'debugwireproto',
4482 4482 [
4483 4483 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4484 4484 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4485 4485 (
4486 4486 b'',
4487 4487 b'noreadstderr',
4488 4488 False,
4489 4489 _(b'do not read from stderr of the remote'),
4490 4490 ),
4491 4491 (
4492 4492 b'',
4493 4493 b'nologhandshake',
4494 4494 False,
4495 4495 _(b'do not log I/O related to the peer handshake'),
4496 4496 ),
4497 4497 ]
4498 4498 + cmdutil.remoteopts,
4499 4499 _(b'[PATH]'),
4500 4500 optionalrepo=True,
4501 4501 )
4502 4502 def debugwireproto(ui, repo, path=None, **opts):
4503 4503 """send wire protocol commands to a server
4504 4504
4505 4505 This command can be used to issue wire protocol commands to remote
4506 4506 peers and to debug the raw data being exchanged.
4507 4507
4508 4508 ``--localssh`` will start an SSH server against the current repository
4509 4509 and connect to that. By default, the connection will perform a handshake
4510 4510 and establish an appropriate peer instance.
4511 4511
4512 4512 ``--peer`` can be used to bypass the handshake protocol and construct a
4513 4513 peer instance using the specified class type. Valid values are ``raw``,
4514 4514 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4515 4515 don't support higher-level command actions.
4516 4516
4517 4517 ``--noreadstderr`` can be used to disable automatic reading from stderr
4518 4518 of the peer (for SSH connections only). Disabling automatic reading of
4519 4519 stderr is useful for making output more deterministic.
4520 4520
4521 4521 Commands are issued via a mini language which is specified via stdin.
4522 4522 The language consists of individual actions to perform. An action is
4523 4523 defined by a block. A block is defined as a line with no leading
4524 4524 space followed by 0 or more lines with leading space. Blocks are
4525 4525 effectively a high-level command with additional metadata.
4526 4526
4527 4527 Lines beginning with ``#`` are ignored.
4528 4528
4529 4529 The following sections denote available actions.
4530 4530
4531 4531 raw
4532 4532 ---
4533 4533
4534 4534 Send raw data to the server.
4535 4535
4536 4536 The block payload contains the raw data to send as one atomic send
4537 4537 operation. The data may not actually be delivered in a single system
4538 4538 call: it depends on the abilities of the transport being used.
4539 4539
4540 4540 Each line in the block is de-indented and concatenated. Then, that
4541 4541 value is evaluated as a Python b'' literal. This allows the use of
4542 4542 backslash escaping, etc.
4543 4543
4544 4544 raw+
4545 4545 ----
4546 4546
4547 4547 Behaves like ``raw`` except flushes output afterwards.
4548 4548
4549 4549 command <X>
4550 4550 -----------
4551 4551
4552 4552 Send a request to run a named command, whose name follows the ``command``
4553 4553 string.
4554 4554
4555 4555 Arguments to the command are defined as lines in this block. The format of
4556 4556 each line is ``<key> <value>``. e.g.::
4557 4557
4558 4558 command listkeys
4559 4559 namespace bookmarks
4560 4560
4561 4561 If the value begins with ``eval:``, it will be interpreted as a Python
4562 4562 literal expression. Otherwise values are interpreted as Python b'' literals.
4563 4563 This allows sending complex types and encoding special byte sequences via
4564 4564 backslash escaping.
4565 4565
4566 4566 The following arguments have special meaning:
4567 4567
4568 4568 ``PUSHFILE``
4569 4569 When defined, the *push* mechanism of the peer will be used instead
4570 4570 of the static request-response mechanism and the content of the
4571 4571 file specified in the value of this argument will be sent as the
4572 4572 command payload.
4573 4573
4574 4574 This can be used to submit a local bundle file to the remote.
4575 4575
4576 4576 batchbegin
4577 4577 ----------
4578 4578
4579 4579 Instruct the peer to begin a batched send.
4580 4580
4581 4581 All ``command`` blocks are queued for execution until the next
4582 4582 ``batchsubmit`` block.
4583 4583
4584 4584 batchsubmit
4585 4585 -----------
4586 4586
4587 4587 Submit previously queued ``command`` blocks as a batch request.
4588 4588
4589 4589 This action MUST be paired with a ``batchbegin`` action.
4590 4590
4591 4591 httprequest <method> <path>
4592 4592 ---------------------------
4593 4593
4594 4594 (HTTP peer only)
4595 4595
4596 4596 Send an HTTP request to the peer.
4597 4597
4598 4598 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4599 4599
4600 4600 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4601 4601 headers to add to the request. e.g. ``Accept: foo``.
4602 4602
4603 4603 The following arguments are special:
4604 4604
4605 4605 ``BODYFILE``
4606 4606 The content of the file defined as the value to this argument will be
4607 4607 transferred verbatim as the HTTP request body.
4608 4608
4609 4609 ``frame <type> <flags> <payload>``
4610 4610 Send a unified protocol frame as part of the request body.
4611 4611
4612 4612 All frames will be collected and sent as the body to the HTTP
4613 4613 request.
4614 4614
4615 4615 close
4616 4616 -----
4617 4617
4618 4618 Close the connection to the server.
4619 4619
4620 4620 flush
4621 4621 -----
4622 4622
4623 4623 Flush data written to the server.
4624 4624
4625 4625 readavailable
4626 4626 -------------
4627 4627
4628 4628 Close the write end of the connection and read all available data from
4629 4629 the server.
4630 4630
4631 4631 If the connection to the server encompasses multiple pipes, we poll both
4632 4632 pipes and read available data.
4633 4633
4634 4634 readline
4635 4635 --------
4636 4636
4637 4637 Read a line of output from the server. If there are multiple output
4638 4638 pipes, reads only the main pipe.
4639 4639
4640 4640 ereadline
4641 4641 ---------
4642 4642
4643 4643 Like ``readline``, but read from the stderr pipe, if available.
4644 4644
4645 4645 read <X>
4646 4646 --------
4647 4647
4648 4648 ``read()`` N bytes from the server's main output pipe.
4649 4649
4650 4650 eread <X>
4651 4651 ---------
4652 4652
4653 4653 ``read()`` N bytes from the server's stderr pipe, if available.
4654 4654
4655 4655 Specifying Unified Frame-Based Protocol Frames
4656 4656 ----------------------------------------------
4657 4657
4658 4658 It is possible to emit a *Unified Frame-Based Protocol* by using special
4659 4659 syntax.
4660 4660
4661 4661 A frame is composed as a type, flags, and payload. These can be parsed
4662 4662 from a string of the form:
4663 4663
4664 4664 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4665 4665
4666 4666 ``request-id`` and ``stream-id`` are integers defining the request and
4667 4667 stream identifiers.
4668 4668
4669 4669 ``type`` can be an integer value for the frame type or the string name
4670 4670 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4671 4671 ``command-name``.
4672 4672
4673 4673 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4674 4674 components. Each component (and there can be just one) can be an integer
4675 4675 or a flag name for stream flags or frame flags, respectively. Values are
4676 4676 resolved to integers and then bitwise OR'd together.
4677 4677
4678 4678 ``payload`` represents the raw frame payload. If it begins with
4679 4679 ``cbor:``, the following string is evaluated as Python code and the
4680 4680 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4681 4681 as a Python byte string literal.
4682 4682 """
4683 4683 opts = pycompat.byteskwargs(opts)
4684 4684
4685 4685 if opts[b'localssh'] and not repo:
4686 4686 raise error.Abort(_(b'--localssh requires a repository'))
4687 4687
4688 4688 if opts[b'peer'] and opts[b'peer'] not in (
4689 4689 b'raw',
4690 4690 b'ssh1',
4691 4691 ):
4692 4692 raise error.Abort(
4693 4693 _(b'invalid value for --peer'),
4694 4694 hint=_(b'valid values are "raw" and "ssh1"'),
4695 4695 )
4696 4696
4697 4697 if path and opts[b'localssh']:
4698 4698 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4699 4699
4700 4700 if ui.interactive():
4701 4701 ui.write(_(b'(waiting for commands on stdin)\n'))
4702 4702
4703 4703 blocks = list(_parsewirelangblocks(ui.fin))
4704 4704
4705 4705 proc = None
4706 4706 stdin = None
4707 4707 stdout = None
4708 4708 stderr = None
4709 4709 opener = None
4710 4710
4711 4711 if opts[b'localssh']:
4712 4712 # We start the SSH server in its own process so there is process
4713 4713 # separation. This prevents a whole class of potential bugs around
4714 4714 # shared state from interfering with server operation.
4715 4715 args = procutil.hgcmd() + [
4716 4716 b'-R',
4717 4717 repo.root,
4718 4718 b'debugserve',
4719 4719 b'--sshstdio',
4720 4720 ]
4721 4721 proc = subprocess.Popen(
4722 4722 pycompat.rapply(procutil.tonativestr, args),
4723 4723 stdin=subprocess.PIPE,
4724 4724 stdout=subprocess.PIPE,
4725 4725 stderr=subprocess.PIPE,
4726 4726 bufsize=0,
4727 4727 )
4728 4728
4729 4729 stdin = proc.stdin
4730 4730 stdout = proc.stdout
4731 4731 stderr = proc.stderr
4732 4732
4733 4733 # We turn the pipes into observers so we can log I/O.
4734 4734 if ui.verbose or opts[b'peer'] == b'raw':
4735 4735 stdin = util.makeloggingfileobject(
4736 4736 ui, proc.stdin, b'i', logdata=True
4737 4737 )
4738 4738 stdout = util.makeloggingfileobject(
4739 4739 ui, proc.stdout, b'o', logdata=True
4740 4740 )
4741 4741 stderr = util.makeloggingfileobject(
4742 4742 ui, proc.stderr, b'e', logdata=True
4743 4743 )
4744 4744
4745 4745 # --localssh also implies the peer connection settings.
4746 4746
4747 4747 url = b'ssh://localserver'
4748 4748 autoreadstderr = not opts[b'noreadstderr']
4749 4749
4750 4750 if opts[b'peer'] == b'ssh1':
4751 4751 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4752 4752 peer = sshpeer.sshv1peer(
4753 4753 ui,
4754 4754 url,
4755 4755 proc,
4756 4756 stdin,
4757 4757 stdout,
4758 4758 stderr,
4759 4759 None,
4760 4760 autoreadstderr=autoreadstderr,
4761 4761 )
4762 4762 elif opts[b'peer'] == b'raw':
4763 4763 ui.write(_(b'using raw connection to peer\n'))
4764 4764 peer = None
4765 4765 else:
4766 4766 ui.write(_(b'creating ssh peer from handshake results\n'))
4767 4767 peer = sshpeer.makepeer(
4768 4768 ui,
4769 4769 url,
4770 4770 proc,
4771 4771 stdin,
4772 4772 stdout,
4773 4773 stderr,
4774 4774 autoreadstderr=autoreadstderr,
4775 4775 )
4776 4776
4777 4777 elif path:
4778 4778 # We bypass hg.peer() so we can proxy the sockets.
4779 4779 # TODO consider not doing this because we skip
4780 4780 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4781 4781 u = urlutil.url(path)
4782 4782 if u.scheme != b'http':
4783 4783 raise error.Abort(_(b'only http:// paths are currently supported'))
4784 4784
4785 4785 url, authinfo = u.authinfo()
4786 4786 openerargs = {
4787 4787 'useragent': b'Mercurial debugwireproto',
4788 4788 }
4789 4789
4790 4790 # Turn pipes/sockets into observers so we can log I/O.
4791 4791 if ui.verbose:
4792 4792 openerargs.update(
4793 4793 {
4794 4794 'loggingfh': ui,
4795 4795 'loggingname': b's',
4796 4796 'loggingopts': {
4797 4797 'logdata': True,
4798 4798 'logdataapis': False,
4799 4799 },
4800 4800 }
4801 4801 )
4802 4802
4803 4803 if ui.debugflag:
4804 4804 openerargs['loggingopts']['logdataapis'] = True
4805 4805
4806 4806 # Don't send default headers when in raw mode. This allows us to
4807 4807 # bypass most of the behavior of our URL handling code so we can
4808 4808 # have near complete control over what's sent on the wire.
4809 4809 if opts[b'peer'] == b'raw':
4810 4810 openerargs['sendaccept'] = False
4811 4811
4812 4812 opener = urlmod.opener(ui, authinfo, **openerargs)
4813 4813
4814 4814 if opts[b'peer'] == b'raw':
4815 4815 ui.write(_(b'using raw connection to peer\n'))
4816 4816 peer = None
4817 4817 elif opts[b'peer']:
4818 4818 raise error.Abort(
4819 4819 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4820 4820 )
4821 4821 else:
4822 4822 peer = httppeer.makepeer(ui, path, opener=opener)
4823 4823
4824 4824 # We /could/ populate stdin/stdout with sock.makefile()...
4825 4825 else:
4826 4826 raise error.Abort(_(b'unsupported connection configuration'))
4827 4827
4828 4828 batchedcommands = None
4829 4829
4830 4830 # Now perform actions based on the parsed wire language instructions.
4831 4831 for action, lines in blocks:
4832 4832 if action in (b'raw', b'raw+'):
4833 4833 if not stdin:
4834 4834 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4835 4835
4836 4836 # Concatenate the data together.
4837 4837 data = b''.join(l.lstrip() for l in lines)
4838 4838 data = stringutil.unescapestr(data)
4839 4839 stdin.write(data)
4840 4840
4841 4841 if action == b'raw+':
4842 4842 stdin.flush()
4843 4843 elif action == b'flush':
4844 4844 if not stdin:
4845 4845 raise error.Abort(_(b'cannot call flush on this peer'))
4846 4846 stdin.flush()
4847 4847 elif action.startswith(b'command'):
4848 4848 if not peer:
4849 4849 raise error.Abort(
4850 4850 _(
4851 4851 b'cannot send commands unless peer instance '
4852 4852 b'is available'
4853 4853 )
4854 4854 )
4855 4855
4856 4856 command = action.split(b' ', 1)[1]
4857 4857
4858 4858 args = {}
4859 4859 for line in lines:
4860 4860 # We need to allow empty values.
4861 4861 fields = line.lstrip().split(b' ', 1)
4862 4862 if len(fields) == 1:
4863 4863 key = fields[0]
4864 4864 value = b''
4865 4865 else:
4866 4866 key, value = fields
4867 4867
4868 4868 if value.startswith(b'eval:'):
4869 4869 value = stringutil.evalpythonliteral(value[5:])
4870 4870 else:
4871 4871 value = stringutil.unescapestr(value)
4872 4872
4873 4873 args[key] = value
4874 4874
4875 4875 if batchedcommands is not None:
4876 4876 batchedcommands.append((command, args))
4877 4877 continue
4878 4878
4879 4879 ui.status(_(b'sending %s command\n') % command)
4880 4880
4881 4881 if b'PUSHFILE' in args:
4882 4882 with open(args[b'PUSHFILE'], 'rb') as fh:
4883 4883 del args[b'PUSHFILE']
4884 4884 res, output = peer._callpush(
4885 4885 command, fh, **pycompat.strkwargs(args)
4886 4886 )
4887 4887 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4888 4888 ui.status(
4889 4889 _(b'remote output: %s\n') % stringutil.escapestr(output)
4890 4890 )
4891 4891 else:
4892 4892 with peer.commandexecutor() as e:
4893 4893 res = e.callcommand(command, args).result()
4894 4894
4895 4895 ui.status(
4896 4896 _(b'response: %s\n')
4897 4897 % stringutil.pprint(res, bprefix=True, indent=2)
4898 4898 )
4899 4899
4900 4900 elif action == b'batchbegin':
4901 4901 if batchedcommands is not None:
4902 4902 raise error.Abort(_(b'nested batchbegin not allowed'))
4903 4903
4904 4904 batchedcommands = []
4905 4905 elif action == b'batchsubmit':
4906 4906 # There is a batching API we could go through. But it would be
4907 4907 # difficult to normalize requests into function calls. It is easier
4908 4908 # to bypass this layer and normalize to commands + args.
4909 4909 ui.status(
4910 4910 _(b'sending batch with %d sub-commands\n')
4911 4911 % len(batchedcommands)
4912 4912 )
4913 4913 assert peer is not None
4914 4914 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4915 4915 ui.status(
4916 4916 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4917 4917 )
4918 4918
4919 4919 batchedcommands = None
4920 4920
4921 4921 elif action.startswith(b'httprequest '):
4922 4922 if not opener:
4923 4923 raise error.Abort(
4924 4924 _(b'cannot use httprequest without an HTTP peer')
4925 4925 )
4926 4926
4927 4927 request = action.split(b' ', 2)
4928 4928 if len(request) != 3:
4929 4929 raise error.Abort(
4930 4930 _(
4931 4931 b'invalid httprequest: expected format is '
4932 4932 b'"httprequest <method> <path>'
4933 4933 )
4934 4934 )
4935 4935
4936 4936 method, httppath = request[1:]
4937 4937 headers = {}
4938 4938 body = None
4939 4939 frames = []
4940 4940 for line in lines:
4941 4941 line = line.lstrip()
4942 4942 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4943 4943 if m:
4944 4944 # Headers need to use native strings.
4945 4945 key = pycompat.strurl(m.group(1))
4946 4946 value = pycompat.strurl(m.group(2))
4947 4947 headers[key] = value
4948 4948 continue
4949 4949
4950 4950 if line.startswith(b'BODYFILE '):
4951 4951 with open(line.split(b' ', 1), b'rb') as fh:
4952 4952 body = fh.read()
4953 4953 elif line.startswith(b'frame '):
4954 4954 frame = wireprotoframing.makeframefromhumanstring(
4955 4955 line[len(b'frame ') :]
4956 4956 )
4957 4957
4958 4958 frames.append(frame)
4959 4959 else:
4960 4960 raise error.Abort(
4961 4961 _(b'unknown argument to httprequest: %s') % line
4962 4962 )
4963 4963
4964 4964 url = path + httppath
4965 4965
4966 4966 if frames:
4967 4967 body = b''.join(bytes(f) for f in frames)
4968 4968
4969 4969 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4970 4970
4971 4971 # urllib.Request insists on using has_data() as a proxy for
4972 4972 # determining the request method. Override that to use our
4973 4973 # explicitly requested method.
4974 4974 req.get_method = lambda: pycompat.sysstr(method)
4975 4975
4976 4976 try:
4977 4977 res = opener.open(req)
4978 4978 body = res.read()
4979 4979 except util.urlerr.urlerror as e:
4980 4980 # read() method must be called, but only exists in Python 2
4981 4981 getattr(e, 'read', lambda: None)()
4982 4982 continue
4983 4983
4984 4984 ct = res.headers.get('Content-Type')
4985 4985 if ct == 'application/mercurial-cbor':
4986 4986 ui.write(
4987 4987 _(b'cbor> %s\n')
4988 4988 % stringutil.pprint(
4989 4989 cborutil.decodeall(body), bprefix=True, indent=2
4990 4990 )
4991 4991 )
4992 4992
4993 4993 elif action == b'close':
4994 4994 assert peer is not None
4995 4995 peer.close()
4996 4996 elif action == b'readavailable':
4997 4997 if not stdout or not stderr:
4998 4998 raise error.Abort(
4999 4999 _(b'readavailable not available on this peer')
5000 5000 )
5001 5001
5002 5002 stdin.close()
5003 5003 stdout.read()
5004 5004 stderr.read()
5005 5005
5006 5006 elif action == b'readline':
5007 5007 if not stdout:
5008 5008 raise error.Abort(_(b'readline not available on this peer'))
5009 5009 stdout.readline()
5010 5010 elif action == b'ereadline':
5011 5011 if not stderr:
5012 5012 raise error.Abort(_(b'ereadline not available on this peer'))
5013 5013 stderr.readline()
5014 5014 elif action.startswith(b'read '):
5015 5015 count = int(action.split(b' ', 1)[1])
5016 5016 if not stdout:
5017 5017 raise error.Abort(_(b'read not available on this peer'))
5018 5018 stdout.read(count)
5019 5019 elif action.startswith(b'eread '):
5020 5020 count = int(action.split(b' ', 1)[1])
5021 5021 if not stderr:
5022 5022 raise error.Abort(_(b'eread not available on this peer'))
5023 5023 stderr.read(count)
5024 5024 else:
5025 5025 raise error.Abort(_(b'unknown action: %s') % action)
5026 5026
5027 5027 if batchedcommands is not None:
5028 5028 raise error.Abort(_(b'unclosed "batchbegin" request'))
5029 5029
5030 5030 if peer:
5031 5031 peer.close()
5032 5032
5033 5033 if proc:
5034 5034 proc.kill()
@@ -1,1170 +1,1170 b''
1 1 import distutils.version
2 2 import os
3 3 import re
4 4 import socket
5 5 import stat
6 6 import subprocess
7 7 import sys
8 8 import tempfile
9 9
# Prefix used for every temporary file/directory hghave creates while probing.
tempprefix = 'hg-hghave-'

# Global registry of feature checks: name -> (probe callable, description).
# Seeded with trivial always-true/always-false entries; the rest is
# populated below via the @check / checkvers decorators.
checks = {
    "true": (lambda: True, "yak shaving"),
    "false": (lambda: False, "nail clipper"),
    "known-bad-output": (lambda: True, "use for currently known bad output"),
    "missing-correct-output": (lambda: False, "use for missing good output"),
}
18 18
try:
    import msvcrt

    # On Windows, switch stdout/stderr to binary mode so written bytes are
    # not mangled by CRLF translation.
    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
except ImportError:
    # msvcrt only exists on Windows; nothing to do elsewhere.
    pass

# Byte-oriented stream handles: prefer the .buffer attribute (Python 3 text
# streams), falling back to the stream itself when it is absent.
stdout = getattr(sys.stdout, 'buffer', sys.stdout)
stderr = getattr(sys.stderr, 'buffer', sys.stderr)
29 29
30 30 is_not_python2 = sys.version_info[0] >= 3
31 31 if is_not_python2:
32 32
33 33 def _sys2bytes(p):
34 34 if p is None:
35 35 return p
36 36 return p.encode('utf-8')
37 37
38 38 def _bytes2sys(p):
39 39 if p is None:
40 40 return p
41 41 return p.decode('utf-8')
42 42
43 43
44 44 else:
45 45
46 46 def _sys2bytes(p):
47 47 return p
48 48
49 49 _bytes2sys = _sys2bytes
50 50
51 51
def check(name, desc):
    """Register a feature probe under ``name`` in the global ``checks`` map.

    Decorator factory: the decorated function is stored together with its
    human-readable description and returned unchanged.
    """

    def register(func):
        checks[name] = (func, desc)
        return func

    return register
60 60
61 61
def checkvers(name, desc, vers):
    """Register one feature check per version in *vers*.

    vers can be a list or an iterator.

    Produces a series of feature checks that have the form <name><vers>
    without any punctuation (even if there's punctuation in 'vers'; i.e.
    this produces 'py38', not 'py3.8' or 'py-38').
    """

    def decorator(func):
        for ver in vers:
            ver = str(ver)
            # Bind the version through a default argument so each generated
            # probe closes over its own value, not the loop variable.
            probe = lambda v=ver: func(v)
            key = '%s%s' % (name, ver.replace('.', ''))
            checks[key] = (probe, desc % ver)
        return func

    return decorator
85 85
86 86
def checkfeatures(features):
    """Evaluate *features* against the registry and classify the outcomes.

    Returns a dict with three lists: 'error' (a probe raised), 'missing'
    (unknown feature name), and 'skipped' (the feature's presence/absence
    means the test should be skipped). A 'no-' prefix negates a feature.
    """
    outcome = {
        'error': [],
        'missing': [],
        'skipped': [],
    }

    for spec in features:
        wanted_absent = spec.startswith('no-')
        name = spec[3:] if wanted_absent else spec

        if name not in checks:
            outcome['missing'].append(name)
            continue

        probe, desc = checks[name]
        try:
            present = probe()
        except Exception as e:
            outcome['error'].append('hghave check %s failed: %r' % (name, e))
            continue

        if present and wanted_absent:
            outcome['skipped'].append('system supports %s' % desc)
        elif not present and not wanted_absent:
            outcome['skipped'].append('missing feature: %s' % desc)

    return outcome
116 116
117 117
def require(features):
    """Require that features are available, exiting if not."""
    result = checkfeatures(features)

    # Report every problem before exiting; messages go through the
    # byte-oriented stderr handle, hence the explicit UTF-8 encoding.
    for missing in result['missing']:
        stderr.write(
            ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
        )
    for msg in result['skipped']:
        stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
    for msg in result['error']:
        stderr.write(('%s\n' % msg).encode('utf-8'))

    # Distinct exit codes: 2 for an unknown feature name, 1 for an
    # ordinary skip or probe error -- presumably so the test runner can
    # tell a misspelled feature apart from a legitimate skip (verify
    # against run-tests.py).
    if result['missing']:
        sys.exit(2)

    if result['skipped'] or result['error']:
        sys.exit(1)
136 136
137 137
def matchoutput(cmd, regexp, ignorestatus=False):
    """Return the match object if cmd executes successfully and its output
    is matched by the supplied regular expression.

    Returns False (not None) when the command exits non-zero and
    *ignorestatus* is unset; callers rely only on truthiness.
    """

    # Tests on Windows have to fake USERPROFILE to point to the test area so
    # that `~` is properly expanded on py3.8+. However, some tools like black
    # make calls that need the real USERPROFILE in order to run `foo --version`.
    env = os.environ
    if os.name == 'nt':
        env = os.environ.copy()
        env['USERPROFILE'] = env['REALUSERPROFILE']

    pattern = re.compile(regexp)
    proc = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        env=env,
    )
    output = proc.communicate()[0]
    if ignorestatus or not proc.returncode:
        return pattern.search(output)
    return False
162 162
163 163
164 164 @check("baz", "GNU Arch baz client")
165 165 def has_baz():
166 166 return matchoutput('baz --version 2>&1', br'baz Bazaar version')
167 167
168 168
169 169 @check("bzr", "Breezy library and executable version >= 3.1")
170 170 def has_bzr():
171 171 if not is_not_python2:
172 172 return False
173 173 try:
174 174 # Test the Breezy python lib
175 175 import breezy
176 176 import breezy.bzr.bzrdir
177 177 import breezy.errors
178 178 import breezy.revision
179 179 import breezy.revisionspec
180 180
181 181 breezy.revisionspec.RevisionSpec
182 182 if breezy.__doc__ is None or breezy.version_info[:2] < (3, 1):
183 183 return False
184 184 except (AttributeError, ImportError):
185 185 return False
186 186 # Test the executable
187 187 return matchoutput('brz --version 2>&1', br'Breezy \(brz\) ')
188 188
189 189
190 190 @check("chg", "running with chg")
191 191 def has_chg():
192 192 return 'CHG_INSTALLED_AS_HG' in os.environ
193 193
194 194
195 195 @check("rhg", "running with rhg as 'hg'")
196 196 def has_rhg():
197 197 return 'RHG_INSTALLED_AS_HG' in os.environ
198 198
199 199
200 200 @check("pyoxidizer", "running with pyoxidizer build as 'hg'")
201 def has_rhg():
201 def has_pyoxidizer():
202 202 return 'PYOXIDIZED_INSTALLED_AS_HG' in os.environ
203 203
204 204
205 205 @check("cvs", "cvs client/server")
206 206 def has_cvs():
207 207 re = br'Concurrent Versions System.*?server'
208 208 return matchoutput('cvs --version 2>&1', re) and not has_msys()
209 209
210 210
211 211 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
212 212 def has_cvs112():
213 213 re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
214 214 return matchoutput('cvs --version 2>&1', re) and not has_msys()
215 215
216 216
217 217 @check("cvsnt", "cvsnt client/server")
218 218 def has_cvsnt():
219 219 re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
220 220 return matchoutput('cvsnt --version 2>&1', re)
221 221
222 222
223 223 @check("darcs", "darcs client")
224 224 def has_darcs():
225 225 return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
226 226
227 227
228 228 @check("mtn", "monotone client (>= 1.0)")
229 229 def has_mtn():
230 230 return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
231 231 'mtn --version', br'monotone 0\.', True
232 232 )
233 233
234 234
235 235 @check("eol-in-paths", "end-of-lines in paths")
236 236 def has_eol_in_paths():
237 237 try:
238 238 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r')
239 239 os.close(fd)
240 240 os.remove(path)
241 241 return True
242 242 except (IOError, OSError):
243 243 return False
244 244
245 245
246 246 @check("execbit", "executable bit")
247 247 def has_executablebit():
248 248 try:
249 249 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
250 250 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
251 251 try:
252 252 os.close(fh)
253 253 m = os.stat(fn).st_mode & 0o777
254 254 new_file_has_exec = m & EXECFLAGS
255 255 os.chmod(fn, m ^ EXECFLAGS)
256 256 exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
257 257 finally:
258 258 os.unlink(fn)
259 259 except (IOError, OSError):
260 260 # we don't care, the user probably won't be able to commit anyway
261 261 return False
262 262 return not (new_file_has_exec or exec_flags_cannot_flip)
263 263
264 264
265 265 @check("suidbit", "setuid and setgid bit")
266 266 def has_suidbit():
267 267 if (
268 268 getattr(os, "statvfs", None) is None
269 269 or getattr(os, "ST_NOSUID", None) is None
270 270 ):
271 271 return False
272 272 return bool(os.statvfs('.').f_flag & os.ST_NOSUID)
273 273
274 274
275 275 @check("icasefs", "case insensitive file system")
276 276 def has_icasefs():
277 277 # Stolen from mercurial.util
278 278 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
279 279 os.close(fd)
280 280 try:
281 281 s1 = os.stat(path)
282 282 d, b = os.path.split(path)
283 283 p2 = os.path.join(d, b.upper())
284 284 if path == p2:
285 285 p2 = os.path.join(d, b.lower())
286 286 try:
287 287 s2 = os.stat(p2)
288 288 return s2 == s1
289 289 except OSError:
290 290 return False
291 291 finally:
292 292 os.remove(path)
293 293
294 294
295 295 @check("fifo", "named pipes")
296 296 def has_fifo():
297 297 if getattr(os, "mkfifo", None) is None:
298 298 return False
299 299 name = tempfile.mktemp(dir='.', prefix=tempprefix)
300 300 try:
301 301 os.mkfifo(name)
302 302 os.unlink(name)
303 303 return True
304 304 except OSError:
305 305 return False
306 306
307 307
308 308 @check("killdaemons", 'killdaemons.py support')
309 309 def has_killdaemons():
310 310 return True
311 311
312 312
313 313 @check("cacheable", "cacheable filesystem")
314 314 def has_cacheable_fs():
315 315 from mercurial import util
316 316
317 317 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
318 318 os.close(fd)
319 319 try:
320 320 return util.cachestat(path).cacheable()
321 321 finally:
322 322 os.remove(path)
323 323
324 324
325 325 @check("lsprof", "python lsprof module")
326 326 def has_lsprof():
327 327 try:
328 328 import _lsprof
329 329
330 330 _lsprof.Profiler # silence unused import warning
331 331 return True
332 332 except ImportError:
333 333 return False
334 334
335 335
def _gethgversion():
    # Parse "major.minor" out of `hg --version`; (0, 0) when the version
    # cannot be determined.
    m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
    if not m:
        return (0, 0)
    return (int(m.group(1)), int(m.group(2)))


# Cached result of _gethgversion(); None means "not probed yet".
_hgversion = None


def gethgversion():
    """Return the (major, minor) version of `hg`, probing at most once."""
    global _hgversion
    if _hgversion is None:
        _hgversion = _gethgversion()
    return _hgversion
351 351
352 352
353 353 @checkvers(
354 354 "hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
355 355 )
356 356 def has_hg_range(v):
357 357 major, minor = v.split('.')[0:2]
358 358 return gethgversion() >= (int(major), int(minor))
359 359
360 360
361 361 @check("rust", "Using the Rust extensions")
362 362 def has_rust():
363 363 """Check is the mercurial currently running is using some rust code"""
364 364 cmd = 'hg debuginstall --quiet 2>&1'
365 365 match = br'checking module policy \(([^)]+)\)'
366 366 policy = matchoutput(cmd, match)
367 367 if not policy:
368 368 return False
369 369 return b'rust' in policy.group(1)
370 370
371 371
372 372 @check("hg08", "Mercurial >= 0.8")
373 373 def has_hg08():
374 374 if checks["hg09"][0]():
375 375 return True
376 376 return matchoutput('hg help annotate 2>&1', '--date')
377 377
378 378
379 379 @check("hg07", "Mercurial >= 0.7")
380 380 def has_hg07():
381 381 if checks["hg08"][0]():
382 382 return True
383 383 return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
384 384
385 385
386 386 @check("hg06", "Mercurial >= 0.6")
387 387 def has_hg06():
388 388 if checks["hg07"][0]():
389 389 return True
390 390 return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
391 391
392 392
393 393 @check("gettext", "GNU Gettext (msgfmt)")
394 394 def has_gettext():
395 395 return matchoutput('msgfmt --version', br'GNU gettext-tools')
396 396
397 397
398 398 @check("git", "git command line client")
399 399 def has_git():
400 400 return matchoutput('git --version 2>&1', br'^git version')
401 401
402 402
403 403 def getgitversion():
404 404 m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
405 405 if not m:
406 406 return (0, 0)
407 407 return (int(m.group(1)), int(m.group(2)))
408 408
409 409
410 410 @check("pygit2", "pygit2 Python library")
411 def has_git():
411 def has_pygit2():
412 412 try:
413 413 import pygit2
414 414
415 415 pygit2.Oid # silence unused import
416 416 return True
417 417 except ImportError:
418 418 return False
419 419
420 420
421 421 # https://github.com/git-lfs/lfs-test-server
422 422 @check("lfs-test-server", "git-lfs test server")
423 423 def has_lfsserver():
424 424 exe = 'lfs-test-server'
425 425 if has_windows():
426 426 exe = 'lfs-test-server.exe'
427 427 return any(
428 428 os.access(os.path.join(path, exe), os.X_OK)
429 429 for path in os.environ["PATH"].split(os.pathsep)
430 430 )
431 431
432 432
433 433 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
434 434 def has_git_range(v):
435 435 major, minor = v.split('.')[0:2]
436 436 return getgitversion() >= (int(major), int(minor))
437 437
438 438
439 439 @check("docutils", "Docutils text processing library")
440 440 def has_docutils():
441 441 try:
442 442 import docutils.core
443 443
444 444 docutils.core.publish_cmdline # silence unused import
445 445 return True
446 446 except ImportError:
447 447 return False
448 448
449 449
450 450 def getsvnversion():
451 451 m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
452 452 if not m:
453 453 return (0, 0)
454 454 return (int(m.group(1)), int(m.group(2)))
455 455
456 456
457 457 @checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5))
458 458 def has_svn_range(v):
459 459 major, minor = v.split('.')[0:2]
460 460 return getsvnversion() >= (int(major), int(minor))
461 461
462 462
463 463 @check("svn", "subversion client and admin tools")
464 464 def has_svn():
465 465 return matchoutput('svn --version 2>&1', br'^svn, version') and matchoutput(
466 466 'svnadmin --version 2>&1', br'^svnadmin, version'
467 467 )
468 468
469 469
470 470 @check("svn-bindings", "subversion python bindings")
471 471 def has_svn_bindings():
472 472 try:
473 473 import svn.core
474 474
475 475 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
476 476 if version < (1, 4):
477 477 return False
478 478 return True
479 479 except ImportError:
480 480 return False
481 481
482 482
483 483 @check("p4", "Perforce server and client")
484 484 def has_p4():
485 485 return matchoutput('p4 -V', br'Rev\. P4/') and matchoutput(
486 486 'p4d -V', br'Rev\. P4D/'
487 487 )
488 488
489 489
490 490 @check("symlink", "symbolic links")
491 491 def has_symlink():
492 492 # mercurial.windows.checklink() is a hard 'no' at the moment
493 493 if os.name == 'nt' or getattr(os, "symlink", None) is None:
494 494 return False
495 495 name = tempfile.mktemp(dir='.', prefix=tempprefix)
496 496 try:
497 497 os.symlink(".", name)
498 498 os.unlink(name)
499 499 return True
500 500 except (OSError, AttributeError):
501 501 return False
502 502
503 503
504 504 @check("hardlink", "hardlinks")
505 505 def has_hardlink():
506 506 from mercurial import util
507 507
508 508 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
509 509 os.close(fh)
510 510 name = tempfile.mktemp(dir='.', prefix=tempprefix)
511 511 try:
512 512 util.oslink(_sys2bytes(fn), _sys2bytes(name))
513 513 os.unlink(name)
514 514 return True
515 515 except OSError:
516 516 return False
517 517 finally:
518 518 os.unlink(fn)
519 519
520 520
521 521 @check("hardlink-whitelisted", "hardlinks on whitelisted filesystems")
522 522 def has_hardlink_whitelisted():
523 523 from mercurial import util
524 524
525 525 try:
526 526 fstype = util.getfstype(b'.')
527 527 except OSError:
528 528 return False
529 529 return fstype in util._hardlinkfswhitelist
530 530
531 531
532 532 @check("rmcwd", "can remove current working directory")
533 533 def has_rmcwd():
534 534 ocwd = os.getcwd()
535 535 temp = tempfile.mkdtemp(dir='.', prefix=tempprefix)
536 536 try:
537 537 os.chdir(temp)
538 538 # On Linux, 'rmdir .' isn't allowed, but the other names are okay.
539 539 # On Solaris and Windows, the cwd can't be removed by any names.
540 540 os.rmdir(os.getcwd())
541 541 return True
542 542 except OSError:
543 543 return False
544 544 finally:
545 545 os.chdir(ocwd)
546 546 # clean up temp dir on platforms where cwd can't be removed
547 547 try:
548 548 os.rmdir(temp)
549 549 except OSError:
550 550 pass
551 551
552 552
553 553 @check("tla", "GNU Arch tla client")
554 554 def has_tla():
555 555 return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
556 556
557 557
558 558 @check("gpg", "gpg client")
559 559 def has_gpg():
560 560 return matchoutput('gpg --version 2>&1', br'GnuPG')
561 561
562 562
563 563 @check("gpg2", "gpg client v2")
564 564 def has_gpg2():
565 565 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.')
566 566
567 567
568 568 @check("gpg21", "gpg client v2.1+")
569 569 def has_gpg21():
570 570 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.(?!0)')
571 571
572 572
573 573 @check("unix-permissions", "unix-style permissions")
574 574 def has_unix_permissions():
575 575 d = tempfile.mkdtemp(dir='.', prefix=tempprefix)
576 576 try:
577 577 fname = os.path.join(d, 'foo')
578 578 for umask in (0o77, 0o07, 0o22):
579 579 os.umask(umask)
580 580 f = open(fname, 'w')
581 581 f.close()
582 582 mode = os.stat(fname).st_mode
583 583 os.unlink(fname)
584 584 if mode & 0o777 != ~umask & 0o666:
585 585 return False
586 586 return True
587 587 finally:
588 588 os.rmdir(d)
589 589
590 590
591 591 @check("unix-socket", "AF_UNIX socket family")
592 592 def has_unix_socket():
593 593 return getattr(socket, 'AF_UNIX', None) is not None
594 594
595 595
596 596 @check("root", "root permissions")
597 597 def has_root():
598 598 return getattr(os, 'geteuid', None) and os.geteuid() == 0
599 599
600 600
601 601 @check("pyflakes", "Pyflakes python linter")
602 602 def has_pyflakes():
603 603 try:
604 604 import pyflakes
605 605
606 606 pyflakes.__version__
607 607 except ImportError:
608 608 return False
609 609 else:
610 610 return True
611 611
612 612
613 613 @check("pylint", "Pylint python linter")
614 614 def has_pylint():
615 615 return matchoutput("pylint --help", br"Usage:[ ]+pylint", True)
616 616
617 617
618 618 @check("clang-format", "clang-format C code formatter (>= 11)")
619 619 def has_clang_format():
620 620 m = matchoutput('clang-format --version', br'clang-format version (\d+)')
621 621 # style changed somewhere between 10.x and 11.x
622 622 if m:
623 623 return int(m.group(1)) >= 11
624 624 # Assist Googler contributors, they have a centrally-maintained version of
625 625 # clang-format that is generally very fresh, but unlike most builds (both
626 626 # official and unofficial), it does *not* include a version number.
627 627 return matchoutput(
628 628 'clang-format --version', br'clang-format .*google3-trunk \([0-9a-f]+\)'
629 629 )
630 630
631 631
632 632 @check("jshint", "JSHint static code analysis tool")
633 633 def has_jshint():
634 634 return matchoutput("jshint --version 2>&1", br"jshint v")
635 635
636 636
637 637 @check("pygments", "Pygments source highlighting library")
638 638 def has_pygments():
639 639 try:
640 640 import pygments
641 641
642 642 pygments.highlight # silence unused import warning
643 643 return True
644 644 except ImportError:
645 645 return False
646 646
647 647
648 648 @check("pygments25", "Pygments version >= 2.5")
649 649 def pygments25():
650 650 try:
651 651 import pygments
652 652
653 653 v = pygments.__version__
654 654 except ImportError:
655 655 return False
656 656
657 657 parts = v.split(".")
658 658 major = int(parts[0])
659 659 minor = int(parts[1])
660 660
661 661 return (major, minor) >= (2, 5)
662 662
663 663
@check("pygments211", "Pygments version >= 2.11")
def pygments211():
    """Return True if pygments is importable and at least version 2.11."""
    try:
        import pygments

        version = pygments.__version__
    except ImportError:
        return False

    # Compare only (major, minor); anything past the second dot is ignored.
    major, minor = (int(part) for part in version.split(".")[:2])
    return (major, minor) >= (2, 11)
678 678
679 679
@check("outer-repo", "outer repo")
def has_outer_repo():
    """Return True when an enclosing Mercurial repository exists.

    Failing for other reasons than 'no repo' implies that there is a repo.
    """
    no_repo = matchoutput('hg root 2>&1', br'abort: no repository found', True)
    return not no_repo
684 684
685 685
@check("ssl", "ssl module available")
def has_ssl():
    """Return True if the standard ssl module is importable."""
    try:
        import ssl
    except ImportError:
        return False
    ssl.CERT_NONE  # silence unused-import warning
    return True
695 695
696 696
@check("defaultcacertsloaded", "detected presence of loaded system CA certs")
def has_defaultcacertsloaded():
    """Return True if the default SSL context ends up with any CA certs."""
    import ssl
    from mercurial import sslutil, ui as uimod

    ui = uimod.ui.load()
    cafile = sslutil._defaultcacerts(ui)
    ctx = ssl.create_default_context()
    if cafile:
        ctx.load_verify_locations(cafile=cafile)
    else:
        ctx.load_default_certs()

    return len(ctx.get_ca_certs()) > 0
711 711
712 712
@check("tls1.2", "TLS 1.2 protocol support")
def has_tls1_2():
    """Return True if the local ssl stack supports TLS 1.2."""
    from mercurial import sslutil

    return b'tls1.2' in sslutil.supportedprotocols
718 718
719 719
@check("windows", "Windows")
def has_windows():
    """Return True when running on Windows."""
    return 'nt' == os.name
723 723
724 724
@check("system-sh", "system() uses sh")
def has_system_sh():
    """Return True on platforms where system() spawns sh (i.e. not Windows)."""
    return 'nt' != os.name
728 728
729 729
@check("serve", "platform and python can manage 'hg serve -d'")
def has_serve():
    """Always True: every supported platform can run 'hg serve -d'."""
    return True
733 733
734 734
@check("setprocname", "whether osutil.setprocname is available or not")
def has_setprocname():
    """Return True if procutil.setprocname exists (C-extension feature)."""
    from mercurial.utils import procutil

    return getattr(procutil, 'setprocname', None) is not None
744 744
745 745
@check("test-repo", "running tests from repository")
def has_test_repo():
    """Return True when $TESTDIR sits inside a Mercurial checkout."""
    testdir = os.environ["TESTDIR"]
    hgdir = os.path.join(testdir, "..", ".hg")
    return os.path.isdir(hgdir)
750 750
751 751
@check("network-io", "whether tests are allowed to access 3rd party services")
def has_network_io():
    """Return True when HGTESTS_ALLOW_NETIO=1 permits network access."""
    return os.environ.get("HGTESTS_ALLOW_NETIO") == "1"
756 756
757 757
@check("curses", "terminfo compiler and curses module")
def has_curses():
    """Return True if the curses module works and a terminfo compiler exists."""
    try:
        import curses

        curses.COLOR_BLUE

        # Windows doesn't have a `tic` executable, but the windows_curses
        # package is sufficient to run the tests without it.
        if os.name == 'nt':
            return True

        return has_tic()
    except (ImportError, AttributeError):
        return False
774 774
775 775
@check("tic", "terminfo compiler")
def has_tic():
    """Return True if an executable `tic` is on PATH."""
    cmd = 'test -x "`which tic`"'
    return matchoutput(cmd, br'')
779 779
780 780
@check("xz", "xz compression utility")
def has_xz():
    """Return True if an executable `xz` is on PATH.

    When Windows invokes a subprocess in shell mode, it uses `cmd.exe`, which
    only knows `where`, not `which`. So invoke MSYS shell explicitly.
    """
    cmd = "sh -c 'test -x \"`which xz`\"'"
    return matchoutput(cmd, b'')
786 786
787 787
@check("msys", "Windows with MSYS")
def has_msys():
    """Return a truthy value when running under an MSYS environment."""
    return os.getenv('MSYSTEM')
791 791
792 792
@check("aix", "AIX")
def has_aix():
    """Return True when running on AIX."""
    platform = sys.platform
    return platform.startswith("aix")
796 796
797 797
@check("osx", "OS X")
def has_osx():
    """Return True when running on macOS."""
    return 'darwin' == sys.platform
801 801
802 802
@check("osxpackaging", "OS X packaging tools")
def has_osxpackaging():
    """Return True if all the macOS packaging CLI tools are available.

    The tools print their usage (and exit nonzero) when invoked without
    arguments, so the expected exit statuses are ignored.
    """
    # The previous ``try/except ImportError`` wrapper here was dead code:
    # matchoutput() performs no imports and cannot raise ImportError.
    return (
        matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
        and matchoutput(
            'productbuild', br'Usage: productbuild ', ignorestatus=1
        )
        and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
        and matchoutput('xar --help', br'Usage: xar', ignorestatus=1)
    )
816 816
817 817
@check('linuxormacos', 'Linux or MacOS')
def has_linuxormacos():
    """Return True on Linux or macOS.

    This isn't a perfect test for MacOS. But it is sufficient for our needs.
    """
    return sys.platform.startswith(('linux', 'darwin'))
822 822
823 823
@check("docker", "docker support")
def has_docker():
    """Return True only for a working docker on Linux."""
    pat = br'A self-sufficient runtime for'
    if not matchoutput('docker --help', pat):
        return False
    if 'linux' not in sys.platform:
        # TODO: in theory we should be able to test docker-based
        # package creation on non-linux using boot2docker, but in
        # practice that requires extra coordination to make sure
        # $TESTTEMP is going to be visible at the same path to the
        # boot2docker VM. If we figure out how to verify that, we
        # can use the following instead of just saying False:
        # return 'DOCKER_HOST' in os.environ
        return False
    return True
840 840
841 841
@check("debhelper", "debian packaging tools")
def has_debhelper():
    """Return True when the full Debian packaging toolchain is present."""
    # Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
    # quote), so just accept anything in that spot.
    have_dpkg = matchoutput(
        'dpkg --version', br"Debian .dpkg' package management program"
    )
    have_dh = matchoutput(
        'dh --help', br'dh is a part of debhelper.', ignorestatus=True
    )
    have_dh_py2 = matchoutput(
        'dh_python2 --help', br'other supported Python versions'
    )
    # debuild comes from the 'devscripts' package, though you might want
    # the 'build-debs' package instead, which has a dependency on devscripts.
    have_debuild = matchoutput(
        'debuild --help', br'to run debian/rules with given parameter'
    )
    return have_dpkg and have_dh and have_dh_py2 and have_debuild
861 861
862 862
@check(
    "debdeps", "debian build dependencies (run dpkg-checkbuilddeps in contrib/)"
)
def has_debdeps():
    """Return True if dpkg-checkbuilddeps accepts our debian/control file."""
    # just check exit status (ignoring output)
    control = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
    return matchoutput('dpkg-checkbuilddeps %s' % control, br'')
870 870
871 871
@check("demandimport", "demandimport enabled")
def has_demandimport():
    """Return True if demandimport is active.

    chg disables demandimport intentionally for performance wins.
    """
    if has_chg():
        return False
    return os.environ.get('HGDEMANDIMPORT') != 'disable'
876 876
877 877
# Add "py27", "py35", ... as possible feature checks. Note that there's no
# punctuation here.
@checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9))
def has_python_range(v):
    """Return True if the interpreter is at least Python ``v`` (e.g. '3.6')."""
    wanted = tuple(int(part) for part in v.split('.')[0:2])
    running = (sys.version_info.major, sys.version_info.minor)
    return running >= wanted
886 886
887 887
@check("py3", "running with Python 3.x")
def has_py3():
    """Return True when the current interpreter is Python 3."""
    return sys.version_info[0] == 3
891 891
892 892
@check("py3exe", "a Python 3.x interpreter is available")
def has_python3exe():
    """Return True if a Python 3 interpreter can be invoked from the shell."""
    py = 'python3'
    if os.name == 'nt':
        py = 'py -3'
    # Accept any 3.x version: the previous pattern ``^Python 3.(5|6|7|8|9)``
    # left the dot unescaped and rejected Python 3.10 and newer.
    return matchoutput('%s -V' % py, br'^Python 3\.(\d+)')
899 899
900 900
@check("pure", "running with pure Python code")
def has_pure():
    """Return True when tests run with the pure-Python module policy."""
    return (
        os.environ.get("HGMODULEPOLICY") == "py"
        or os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure"
    )
909 909
910 910
@check("slow", "allow slow tests (use --allow-slow-tests)")
def has_slow():
    """Return True when slow tests were explicitly allowed."""
    return 'slow' == os.environ.get('HGTEST_SLOW')
914 914
915 915
@check("hypothesis", "Hypothesis automated test generation")
def has_hypothesis():
    """Return True if the hypothesis testing library is importable."""
    try:
        import hypothesis
    except ImportError:
        return False
    hypothesis.given  # silence unused-import warning
    return True
925 925
926 926
@check("unziplinks", "unzip(1) understands and extracts symlinks")
def unzip_understands_symlinks():
    """Return True if unzip is the Info-ZIP build (which handles symlinks)."""
    cmd = 'unzip --help'
    return matchoutput(cmd, br'Info-ZIP')
930 930
931 931
@check("zstd", "zstd Python module available")
def has_zstd():
    """Return True if Mercurial's bundled zstd module is importable."""
    try:
        import mercurial.zstd
    except ImportError:
        return False
    mercurial.zstd.__version__  # silence unused-import warning
    return True
941 941
942 942
@check("devfull", "/dev/full special file")
def has_dev_full():
    """Return True when the /dev/full special file exists."""
    path = '/dev/full'
    return os.path.exists(path)
946 946
947 947
@check("ensurepip", "ensurepip module")
def has_ensurepip():
    """Return True if the stdlib ensurepip module is importable."""
    try:
        import ensurepip
    except ImportError:
        return False
    ensurepip.bootstrap  # silence unused-import warning
    return True
957 957
958 958
@check("virtualenv", "virtualenv support")
def has_virtualenv():
    """Return True for a usable (non-ancient) virtualenv module.

    --no-site-package became the default in 1.7 (Nov 2011), and the argument
    was removed in 20.0 (Feb 2020). Rather than make the script complicated,
    just ignore ancient versions.
    """
    try:
        import virtualenv

        major = int(virtualenv.__version__.split('.')[0])
    except (AttributeError, ImportError, IndexError):
        return False
    return major > 1
970 970
971 971
@check("fsmonitor", "running tests with fsmonitor")
def has_fsmonitor():
    """Return True when the fsmonitor test mode is requested."""
    return os.environ.__contains__('HGFSMONITOR_TESTS')
975 975
976 976
@check("fuzzywuzzy", "Fuzzy string matching library")
def has_fuzzywuzzy():
    """Return True if the fuzzywuzzy library is importable."""
    try:
        import fuzzywuzzy
    except ImportError:
        return False
    fuzzywuzzy.__version__  # silence unused-import warning
    return True
986 986
987 987
@check("clang-libfuzzer", "clang new enough to include libfuzzer")
def has_clang_libfuzzer():
    """Return True for clang >= 6 (libfuzzer is new in clang 6)."""
    mat = matchoutput('clang --version', br'clang version (\d)')
    if not mat:
        return False
    return int(mat.group(1)) > 5
995 995
996 996
@check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
def has_clang60():
    """Return True if a `clang-6.0` binary is available."""
    cmd = 'clang-6.0 --version'
    return matchoutput(cmd, br'clang version 6\.')
1000 1000
1001 1001
@check("xdiff", "xdiff algorithm")
def has_xdiff():
    """Return True when the compiled bdiff module provides xdiffblocks."""
    try:
        from mercurial import policy

        blocks = policy.importmod('bdiff').xdiffblocks
    except (ImportError, AttributeError):
        return False
    return blocks(b'', b'') == [(0, 0, 0, 0)]
1011 1011
1012 1012
@check('extraextensions', 'whether tests are running with extra extensions')
def has_extraextensions():
    """Return True when extra extensions are configured for this run."""
    return os.environ.get('HGTESTEXTRAEXTENSIONS') is not None
1016 1016
1017 1017
def getrepofeatures():
    """Obtain set of repository features in use.

    HGREPOFEATURES can be used to define or remove features. It contains
    a space-delimited list of feature strings. Strings beginning with ``-``
    mean to remove.
    """
    # Default list provided by core.
    features = {
        'bundlerepo',
        'revlogstore',
        'fncache',
    }

    # Features that imply other features.
    implies = {
        'simplestore': ['-revlogstore', '-bundlerepo', '-fncache'],
    }

    def _toggle(flag):
        # A leading '-' removes a feature; anything else adds one.
        if flag.startswith('-'):
            features.discard(flag[1:])
        else:
            features.add(flag)

    for override in os.environ.get('HGREPOFEATURES', '').split(' '):
        if not override:
            continue

        _toggle(override)
        for implied in implies.get(override, []):
            _toggle(implied)

    return features
1055 1055
1056 1056
@check('reporevlogstore', 'repository using the default revlog store')
def has_reporevlogstore():
    """Return True when the repo under test uses the revlog store."""
    features = getrepofeatures()
    return 'revlogstore' in features
1060 1060
1061 1061
@check('reposimplestore', 'repository using simple storage extension')
def has_reposimplestore():
    """Return True when the repo under test uses the simple store."""
    features = getrepofeatures()
    return 'simplestore' in features
1065 1065
1066 1066
@check('repobundlerepo', 'whether we can open bundle files as repos')
def has_repobundlerepo():
    """Return True when bundle files can be opened as repositories."""
    features = getrepofeatures()
    return 'bundlerepo' in features
1070 1070
1071 1071
@check('repofncache', 'repository has an fncache')
def has_repofncache():
    """Return True when the repo under test has an fncache."""
    features = getrepofeatures()
    return 'fncache' in features
1075 1075
1076 1076
@check('dirstate-v2', 'using the v2 format of .hg/dirstate')
def has_dirstate_v2():
    """Return True when tests run with the dirstate-v2 format enabled."""
    # Keep this logic in sync with `newreporequirements()` in
    # `mercurial/localrepo.py`
    if not has_rust():
        return False
    return matchoutput(
        'hg config format.exp-rc-dirstate-v2', b'(?i)1|yes|true|on|always'
    )
1083 1083
1084 1084
@check('sqlite', 'sqlite3 module and matching cli is available')
def has_sqlite():
    """Return True when both the sqlite3 module and CLI are new enough."""
    try:
        import sqlite3
    except ImportError:
        return False

    if sqlite3.sqlite_version_info < (3, 8, 3):
        # WITH clause not supported
        return False

    return matchoutput('sqlite3 -version', br'^3\.\d+')
1099 1099
1100 1100
@check('vcr', 'vcr http mocking library (pytest-vcr)')
def has_vcr():
    """Return True if the vcr HTTP-mocking library is importable."""
    try:
        import vcr

        vcr.VCR  # silence unused-import warning
    except (ImportError, AttributeError):
        return False
    return True
1111 1111
1112 1112
@check('emacs', 'GNU Emacs')
def has_emacs():
    """Return True for emacs >= 24.4.

    Our emacs lisp uses `with-eval-after-load` which is new in emacs
    24.4, so we allow emacs 24.4, 24.5, and 25+ (24.5 was the last
    24 release).
    """
    pattern = b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)'
    return matchoutput('emacs --version', pattern)
1119 1119
1120 1120
@check('black', 'the black formatter for python (>= 20.8b1)')
def has_black():
    """Return True for a black installation at least as new as 20.8b1."""
    version_regex = b'black, (?:version )?([0-9a-b.]+)'
    version = matchoutput('black --version', version_regex)
    sv = distutils.version.StrictVersion
    return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
1128 1128
1129 1129
@check('pytype', 'the pytype type checker')
def has_pytype():
    """Return True for pytype >= 2019.10.17."""
    version = matchoutput('pytype --version', b'[0-9a-b.]+')
    sv = distutils.version.StrictVersion
    return version and sv(_bytes2sys(version.group(0))) >= sv('2019.10.17')
1136 1136
1137 1137
@check("rustfmt", "rustfmt tool at version nightly-2021-11-02")
def has_rustfmt():
    """Return True if Nightly rustfmt is installed via rustup.

    We use Nightly's rustfmt due to current unstable config options.
    """
    cmd = '`rustup which --toolchain nightly-2021-11-02 rustfmt` --version'
    return matchoutput(cmd, b'rustfmt')
1145 1145
1146 1146
@check("cargo", "cargo tool")
def has_cargo():
    """Return True if cargo is installed via rustup."""
    cmd = '`rustup which cargo` --version'
    return matchoutput(cmd, b'cargo')
1150 1150
1151 1151
@check("lzma", "python lzma module")
def has_lzma():
    """Return True if the _lzma extension module is importable."""
    try:
        import _lzma
    except ImportError:
        return False
    _lzma.FORMAT_XZ  # silence unused-import warning
    return True
1161 1161
1162 1162
@check("bash", "bash shell")
def has_bash():
    """Return True if a working bash shell is available."""
    cmd = "bash -c 'echo hi'"
    return matchoutput(cmd, b'^hi$')
1166 1166
1167 1167
@check("bigendian", "big-endian CPU")
def has_bigendian():
    """Return True on big-endian hardware."""
    return 'big' == sys.byteorder
General Comments 0
You need to be logged in to leave comments. Login now