##// END OF EJS Templates
simplemerge: clarify names of functions that render conflict markers
Author: Martin von Zweigbergk
Changeset: r49407:2dbee604, branch: default
parent child Browse files
Show More
@@ -1,4874 +1,4876 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 deltas as deltautil,
107 107 nodemap,
108 108 rewrite,
109 109 sidedata,
110 110 )
111 111
# Convenience alias used throughout this module to release held locks.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the commands
# registered by the strip module so both share a single registrar/table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # Explicit index file given: open that revlog directly, without
        # requiring (or consulting) a repository.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    # Print the ancestor as "revnum:fullhex".
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
139 139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the standard EICAR test file into the cache area; a resident
    # AV scanner is expected to react to (quarantine/delete) it.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156 156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # openpath handles URLs as well as local paths.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
164 164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Only an empty repository may be rebuilt from a DAG description.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count node events so progress reporting
    # has an accurate total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Commit everything inside a single transaction, under both locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node committed (-1 before first commit)
        atbranch = b'default'
        nodeids = []  # maps DAG id -> commit node, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the "mf" file content
                        # of both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip() for l in simplemerge.render_markers(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's "own" line so every rev changes
                    # the file (keeping it mergeable but never identical).
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # Carry over the second parent's nf* files so the
                        # merge does not silently drop them.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # built above; None means "file absent in this rev".
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag event: remember it, written out after commit.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
341 343
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup 'gen'.

    With 'all' set, every delta of every section (changelog, manifest,
    filelogs) is listed in full; otherwise only changelog node hashes are
    printed.  'indent' prefixes each output line (used when nested inside
    bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one line per delta in the current section.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sections must be consumed in stream order: changelog, manifest,
        # then one filelog section per file (terminated by an empty header).
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
382 384
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Markers encoded with a format we do not understand: report the
        # version and raw size instead of failing.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
406 408
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one head per line"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
416 418
def _quasirepr(thing):
    """Return a stable bytes repr of 'thing'.

    Mappings are rendered with their keys sorted so the output is
    deterministic across runs; everything else falls back to repr().
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
424 426
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # Known part payloads get a decoded, indented dump (unless --quiet).
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
448 450
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: print only the bundle specification and stop.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files get their own, structured dumper.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
472 474
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        # bundle2 capabilities are advertised separately; list them with
        # their values indented under each key.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # Always release the peer connection, even on error.
        peer.close()
493 495
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # --compute: derive the file-change info from the context itself,
        # ignoring what is stored in sidedata.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Default: read the pre-computed file-change block from the
        # changelog sidedata, if present for this revision.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify the change; the checks are ordered from most to
            # least specific, with "touched" as the catch-all.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
544 546
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    problems = 0
    # verify() yields (format-string, arg, ...) tuples, one per problem.
    for issue in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(issue[0] % issue[1:])
        problems += 1
    if problems:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
559 561
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured style labels; default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
573 575
def _debugdisplaycolor(ui):
    """Print every available color/effect name, each styled as itself."""
    # Work on a copy so the caller's ui styles are not clobbered.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose color.* / terminfo.* config entries,
        # keyed by their short name.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
591 593
def _debugdisplaystyle(ui):
    """List each configured style label with its (colorized) effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect columns line up under the longest label.
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(label))))
            colorized = (ui.label(e, e) for e in effects.split())
            ui.write(b', '.join(colorized))
        ui.write(b'\n')
606 608
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out -- warn the user.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
629 631
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Revlog-index mode: emit node events for every rev, plus a label
        # event for each rev the user listed on the command line.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # Changelog mode: optionally annotate with branch switches ('a'
        # events) and tag labels ('l' events).
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a branch event only when the branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
700 702
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the positional argument is the revision, not a
        # file: shift it over and forbid a second positional.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: the stored bytes, without flag processing applied.
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
717 719
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (unixtime, tzoffset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
737 739
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        """Return (compsize, uncompsize, deltatype, chain, chainsize) for rev."""
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the delta base (e[3]) may be either parent,
            # any other rev, or the rev itself (full snapshot).
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is always the previous rev,
            # unless this rev is itself a full snapshot.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Chains are numbered in order of first appearance of their base.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard against division by zero for empty/degenerate revisions.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse-read slicing to measure how much data
            # would actually be read from disk for this chain.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
919 921
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated but still honored; it overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # mtime == -1 means "unset" in the dirstate encoding.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 in the stored mode marks a symlink.
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975 977
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    # NOTE(review): the docstring's second line presumably means
    # "nothing for dirstate-v1" -- only dirstate-v2 stores this hash.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The ignore-pattern hash is the trailing 20 bytes of the docket's
        # tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
991 993
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (possibly local) peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository (everything outside `::remote_revs` hidden)
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick for the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` accumulates audit information filled in by the discovery code
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture incidental ui output into the
        # formatted data instead of letting it interleave

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # sanity check: common and missing partition the repository
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:   %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"      both:            %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1245 1247
1246 1248
1247 1249 _chunksize = 4 << 10
1248 1250
1249 1251
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    src = urlmod.open(ui, url, output)

    # write to the requested file when --output is given, otherwise to the ui
    sink = open(output, b"wb", _chunksize) if output else ui
    try:
        chunk = src.read(_chunksize)
        while chunk:
            sink.write(chunk)
            chunk = src.read(_chunksize)
    finally:
        if output:
            sink.close()
1272 1274
1273 1275
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (oxidized) builds have no per-module __file__; point at
            # the executable instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with compatibility status vs this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        # always record the machine-readable field, even in non-verbose mode
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1335 1337
1336 1338
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset expression goes through these transformation stages in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stage trees to print
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the candidates selected by the matcher, sorted for stable output
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1432 1434
1433 1435
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (building a report) excludes actually repairing anything
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the heavy lifting lives in the `rewrite` module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1506 1508
1507 1509
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: longest variant name (at least the header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # `%s:` padded so every value column starts at the same offset
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output renders booleans as yes/no; strings pass through

        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so mismatches can be colorized differently
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1578 1580
1579 1581
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # same rendering as the historical `flag and b'yes' or b'no'` idiom
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; stay silent on failure
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1602 1604
1603 1605
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    kwargs = {}
    if common:
        kwargs['common'] = [bin(c) for c in common]
    if head:
        kwargs['heads'] = [bin(h) for h in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name to the on-disk bundle type
    name2type = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = name2type.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1650 1652
1651 1653
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore pattern
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a parent directory is ignored
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1700 1702
1701 1703
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    shortfn = hex if ui.debugflag else short

    # node column width, taken from the first entry (12 if the store is empty)
    idlen = 12
    firstrev = next(iter(store), None)
    if firstrev is not None:
        idlen = len(shortfn(store.node(firstrev)))

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1741 1743
1742 1744
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per real parent; a null second parent is omitted
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1761 1763
1762 1764
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # result unused; presumably this call forces the index to be loaded
    # before we inspect it — TODO confirm
    repo.changelog.shortest(repo.nullid, 1)
    idx = repo.changelog.index
    if not util.safehasattr(idx, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = idx.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1772 1774
1773 1775
1774 1776 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1775 1777 def debuginstall(ui, **opts):
1776 1778 """test Mercurial installation
1777 1779
1778 1780 Returns 0 on success.
1779 1781 """
1780 1782 opts = pycompat.byteskwargs(opts)
1781 1783
1782 1784 problems = 0
1783 1785
1784 1786 fm = ui.formatter(b'debuginstall', opts)
1785 1787 fm.startitem()
1786 1788
1787 1789 # encoding might be unknown or wrong. don't translate these messages.
1788 1790 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1789 1791 err = None
1790 1792 try:
1791 1793 codecs.lookup(pycompat.sysstr(encoding.encoding))
1792 1794 except LookupError as inst:
1793 1795 err = stringutil.forcebytestr(inst)
1794 1796 problems += 1
1795 1797 fm.condwrite(
1796 1798 err,
1797 1799 b'encodingerror',
1798 1800 b" %s\n (check that your locale is properly set)\n",
1799 1801 err,
1800 1802 )
1801 1803
1802 1804 # Python
1803 1805 pythonlib = None
1804 1806 if util.safehasattr(os, '__file__'):
1805 1807 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1806 1808 elif getattr(sys, 'oxidized', False):
1807 1809 pythonlib = pycompat.sysexecutable
1808 1810
1809 1811 fm.write(
1810 1812 b'pythonexe',
1811 1813 _(b"checking Python executable (%s)\n"),
1812 1814 pycompat.sysexecutable or _(b"unknown"),
1813 1815 )
1814 1816 fm.write(
1815 1817 b'pythonimplementation',
1816 1818 _(b"checking Python implementation (%s)\n"),
1817 1819 pycompat.sysbytes(platform.python_implementation()),
1818 1820 )
1819 1821 fm.write(
1820 1822 b'pythonver',
1821 1823 _(b"checking Python version (%s)\n"),
1822 1824 (b"%d.%d.%d" % sys.version_info[:3]),
1823 1825 )
1824 1826 fm.write(
1825 1827 b'pythonlib',
1826 1828 _(b"checking Python lib (%s)...\n"),
1827 1829 pythonlib or _(b"unknown"),
1828 1830 )
1829 1831
1830 1832 try:
1831 1833 from . import rustext # pytype: disable=import-error
1832 1834
1833 1835 rustext.__doc__ # trigger lazy import
1834 1836 except ImportError:
1835 1837 rustext = None
1836 1838
1837 1839 security = set(sslutil.supportedprotocols)
1838 1840 if sslutil.hassni:
1839 1841 security.add(b'sni')
1840 1842
1841 1843 fm.write(
1842 1844 b'pythonsecurity',
1843 1845 _(b"checking Python security support (%s)\n"),
1844 1846 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1845 1847 )
1846 1848
1847 1849 # These are warnings, not errors. So don't increment problem count. This
1848 1850 # may change in the future.
1849 1851 if b'tls1.2' not in security:
1850 1852 fm.plain(
1851 1853 _(
1852 1854 b' TLS 1.2 not supported by Python install; '
1853 1855 b'network connections lack modern security\n'
1854 1856 )
1855 1857 )
1856 1858 if b'sni' not in security:
1857 1859 fm.plain(
1858 1860 _(
1859 1861 b' SNI not supported by Python install; may have '
1860 1862 b'connectivity issues with some servers\n'
1861 1863 )
1862 1864 )
1863 1865
1864 1866 fm.plain(
1865 1867 _(
1866 1868 b"checking Rust extensions (%s)\n"
1867 1869 % (b'missing' if rustext is None else b'installed')
1868 1870 ),
1869 1871 )
1870 1872
1871 1873 # TODO print CA cert info
1872 1874
1873 1875 # hg version
1874 1876 hgver = util.version()
1875 1877 fm.write(
1876 1878 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1877 1879 )
1878 1880 fm.write(
1879 1881 b'hgverextra',
1880 1882 _(b"checking Mercurial custom build (%s)\n"),
1881 1883 b'+'.join(hgver.split(b'+')[1:]),
1882 1884 )
1883 1885
1884 1886 # compiled modules
1885 1887 hgmodules = None
1886 1888 if util.safehasattr(sys.modules[__name__], '__file__'):
1887 1889 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1888 1890 elif getattr(sys, 'oxidized', False):
1889 1891 hgmodules = pycompat.sysexecutable
1890 1892
1891 1893 fm.write(
1892 1894 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1893 1895 )
1894 1896 fm.write(
1895 1897 b'hgmodules',
1896 1898 _(b"checking installed modules (%s)...\n"),
1897 1899 hgmodules or _(b"unknown"),
1898 1900 )
1899 1901
1900 1902 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1901 1903 rustext = rustandc # for now, that's the only case
1902 1904 cext = policy.policy in (b'c', b'allow') or rustandc
1903 1905 nopure = cext or rustext
1904 1906 if nopure:
1905 1907 err = None
1906 1908 try:
1907 1909 if cext:
1908 1910 from .cext import ( # pytype: disable=import-error
1909 1911 base85,
1910 1912 bdiff,
1911 1913 mpatch,
1912 1914 osutil,
1913 1915 )
1914 1916
1915 1917 # quiet pyflakes
1916 1918 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1917 1919 if rustext:
1918 1920 from .rustext import ( # pytype: disable=import-error
1919 1921 ancestor,
1920 1922 dirstate,
1921 1923 )
1922 1924
1923 1925 dir(ancestor), dir(dirstate) # quiet pyflakes
1924 1926 except Exception as inst:
1925 1927 err = stringutil.forcebytestr(inst)
1926 1928 problems += 1
1927 1929 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1928 1930
1929 1931 compengines = util.compengines._engines.values()
1930 1932 fm.write(
1931 1933 b'compengines',
1932 1934 _(b'checking registered compression engines (%s)\n'),
1933 1935 fm.formatlist(
1934 1936 sorted(e.name() for e in compengines),
1935 1937 name=b'compengine',
1936 1938 fmt=b'%s',
1937 1939 sep=b', ',
1938 1940 ),
1939 1941 )
1940 1942 fm.write(
1941 1943 b'compenginesavail',
1942 1944 _(b'checking available compression engines (%s)\n'),
1943 1945 fm.formatlist(
1944 1946 sorted(e.name() for e in compengines if e.available()),
1945 1947 name=b'compengine',
1946 1948 fmt=b'%s',
1947 1949 sep=b', ',
1948 1950 ),
1949 1951 )
1950 1952 wirecompengines = compression.compengines.supportedwireengines(
1951 1953 compression.SERVERROLE
1952 1954 )
1953 1955 fm.write(
1954 1956 b'compenginesserver',
1955 1957 _(
1956 1958 b'checking available compression engines '
1957 1959 b'for wire protocol (%s)\n'
1958 1960 ),
1959 1961 fm.formatlist(
1960 1962 [e.name() for e in wirecompengines if e.wireprotosupport()],
1961 1963 name=b'compengine',
1962 1964 fmt=b'%s',
1963 1965 sep=b', ',
1964 1966 ),
1965 1967 )
1966 1968 re2 = b'missing'
1967 1969 if util._re2:
1968 1970 re2 = b'available'
1969 1971 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1970 1972 fm.data(re2=bool(util._re2))
1971 1973
1972 1974 # templates
1973 1975 p = templater.templatedir()
1974 1976 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1975 1977 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1976 1978 if p:
1977 1979 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1978 1980 if m:
1979 1981 # template found, check if it is working
1980 1982 err = None
1981 1983 try:
1982 1984 templater.templater.frommapfile(m)
1983 1985 except Exception as inst:
1984 1986 err = stringutil.forcebytestr(inst)
1985 1987 p = None
1986 1988 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1987 1989 else:
1988 1990 p = None
1989 1991 fm.condwrite(
1990 1992 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1991 1993 )
1992 1994 fm.condwrite(
1993 1995 not m,
1994 1996 b'defaulttemplatenotfound',
1995 1997 _(b" template '%s' not found\n"),
1996 1998 b"default",
1997 1999 )
1998 2000 if not p:
1999 2001 problems += 1
2000 2002 fm.condwrite(
2001 2003 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2002 2004 )
2003 2005
2004 2006 # editor
2005 2007 editor = ui.geteditor()
2006 2008 editor = util.expandpath(editor)
2007 2009 editorbin = procutil.shellsplit(editor)[0]
2008 2010 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2009 2011 cmdpath = procutil.findexe(editorbin)
2010 2012 fm.condwrite(
2011 2013 not cmdpath and editor == b'vi',
2012 2014 b'vinotfound',
2013 2015 _(
2014 2016 b" No commit editor set and can't find %s in PATH\n"
2015 2017 b" (specify a commit editor in your configuration"
2016 2018 b" file)\n"
2017 2019 ),
2018 2020 not cmdpath and editor == b'vi' and editorbin,
2019 2021 )
2020 2022 fm.condwrite(
2021 2023 not cmdpath and editor != b'vi',
2022 2024 b'editornotfound',
2023 2025 _(
2024 2026 b" Can't find editor '%s' in PATH\n"
2025 2027 b" (specify a commit editor in your configuration"
2026 2028 b" file)\n"
2027 2029 ),
2028 2030 not cmdpath and editorbin,
2029 2031 )
2030 2032 if not cmdpath and editor != b'vi':
2031 2033 problems += 1
2032 2034
2033 2035 # check username
2034 2036 username = None
2035 2037 err = None
2036 2038 try:
2037 2039 username = ui.username()
2038 2040 except error.Abort as e:
2039 2041 err = e.message
2040 2042 problems += 1
2041 2043
2042 2044 fm.condwrite(
2043 2045 username, b'username', _(b"checking username (%s)\n"), username
2044 2046 )
2045 2047 fm.condwrite(
2046 2048 err,
2047 2049 b'usernameerror',
2048 2050 _(
2049 2051 b"checking username...\n %s\n"
2050 2052 b" (specify a username in your configuration file)\n"
2051 2053 ),
2052 2054 err,
2053 2055 )
2054 2056
2055 2057 for name, mod in extensions.extensions():
2056 2058 handler = getattr(mod, 'debuginstall', None)
2057 2059 if handler is not None:
2058 2060 problems += handler(ui, fm)
2059 2061
2060 2062 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2061 2063 if not problems:
2062 2064 fm.data(problems=problems)
2063 2065 fm.condwrite(
2064 2066 problems,
2065 2067 b'problems',
2066 2068 _(b"%d problems detected, please check your install!\n"),
2067 2069 problems,
2068 2070 )
2069 2071 fm.end()
2070 2072
2071 2073 return problems
2072 2074
2073 2075
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Query the peer once with all node ids, then render one digit per id.
    known = peer.known([bin(hexnode) for hexnode in ids])
    digits = [b"1" if flag else b"0" for flag in known]
    ui.write(b"%s\n" % b"".join(digits))
2087 2089
2088 2090
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so that pre-existing shell completion scripts
    # that invoke `hg debuglabelcomplete` keep working; all real work is
    # done by debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2093 2095
2094 2096
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: remove the lock file(s) directly, bypassing the
    # normal lock machinery entirely (hence "DANGEROUS" in the help).
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly, then hold
    # them until the user answers the prompt (or interrupts the command);
    # the finally clause guarantees release on any exit path.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode (default): for each lock, probe it and describe who
    # holds it, or report it free.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could acquire it, so nobody holds it; release immediately.
            l.release()
        else:
            # Somebody holds the lock: read the lock file to identify the
            # owner ("host:pid"), and show how long it has been held.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between probing and stat:
                # treat it as free; anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2206 2208
2207 2209
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog-based
        # manifest storage; abort cleanly if the active storage lacks it.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # wlock because clearing mutates the on-disk cache file.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # Display mode: dump the cache contents in LRU order without
    # disturbing that order.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2281 2283
2282 2284
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: commits, then per-file state (with merge or
        # rename details), then any extras not attached to a tracked file.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "local" and "other" commits involved in the merge, with their
    # user-facing labels when recorded.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file records: content merges carry hash/path/node tuples,
    # path conflicts carry rename information instead.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras for files that are not (or no longer) in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2390 2392
2391 2393
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather candidate names from every namespace; branches are handled
    # separately below because only open branches should be offered.
    names = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)

    # An empty argument list means "complete everything".
    prefixes = args if args else [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2414 2416
2415 2417
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Each option is a distinct mode; all of them work on the unfiltered
    # repository so the nodemap covers every changelog revision.
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # Rust/C index can serialize its own nodemap directly.
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2477 2479
2478 2480
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id into binary, rejecting anything
        # shorter (no prefix lookup on purpose, see comment below).
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Delete mode: remove markers by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Create mode: a precursor (and optional successors) were given.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction nesting: the transaction is released in
        # all cases, but only closed (committed) on success.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parents can only be recorded for changesets we have.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    # Display mode: list markers, optionally filtered by --rev.
    else:
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index must be computed over ALL markers even though only
            # the --rev-relevant subset is displayed.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2628 2630
2629 2631
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "src -> dst" line per copy recorded against the first parent.
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2642 2644
2643 2645
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUG FIX: this function used to be named debugp1copies, duplicating
    # the previous definition. The `hg debugp2copies` command itself was
    # unaffected (the @command decorator captures the function object at
    # decoration time), but the module attribute `debugp1copies` was
    # silently rebound to this p2 implementation, breaking any direct
    # caller of debugcommands.debugp1copies. Renaming restores both
    # module-level names to the correct implementations.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2656 2658
2657 2659
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) from the dirstate that extend `path`,
        # restricted to dirstate states in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs outside the repository root cannot match anything.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; translate OS separators both on
        # the way in (the spec) and on the way out (the results).
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator so only
                # the next segment is completed.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags; empty
    # means "all of n/m/a/r" (see the fallback below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2726 2728
2727 2729
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both revisions, then list every copy found between them
    # (restricted by any file patterns), sorted by destination path.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    pathmap = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dst, src in sorted(pathmap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2741 2743
2742 2744
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Ensure the connection is torn down even if a query failed.
        peer.close()
2766 2768
2767 2769
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied the same way merge does it: via ui.forcemerge.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's own chatter unless --debug was given,
            # so the normal output stays one line per file.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2852 2854
2853 2855
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            listing = peer.listkeys(namespace)
            for name, value in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(name), stringutil.escapestr(value))
                )
            return
        # update mode: compare-and-swap a single key over the wire protocol
        key, old, new = keyinfo
        args = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with peer.commandexecutor() as executor:
            outcome = executor.callcommand(b'pushkey', args).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        # pushkey returns a truthy value on success; exit code is inverted
        return not outcome
    finally:
        peer.close()
2889 2891
2890 2892
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvecs) of two revisions

    Prints both vectors, their depths, and the delta/hamming-distance/
    distance metrics along with the relation symbol between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relationship between the two vectors. The original code
    # had no fallback branch, so `rel` could be unbound (NameError) if none
    # of the comparisons matched; use b"?" for "unknown relation" instead.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2917 2919
2918 2920
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below
        targets = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            # files the parent manifest has but the dirstate does not know
            manifestonly = inmanifest - indirstate
            # dirstate-only files that are not pending adds
            extra = indirstate - inmanifest
            notadded = {f for f in extra if not ds.get_entry(f).added}
            targets = manifestonly | notadded

        ds.rebuild(ctx.node(), ctx.manifest(), targets)
2966 2968
2967 2969
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # normalize keyword options to bytes keys before reading them
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2984 2986
2985 2987
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() reports (source path, source filenode) or a falsy value
        source = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if not source:
            ui.write(_(b"%s not renamed\n") % display)
            continue
        srcpath, srcnode = source
        ui.write(
            _(b"%s renamed from %s:%s\n") % (display, srcpath, hex(srcnode))
        )
3005 3007
3006 3008
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement name per line, in sorted order for stable output
    for name in sorted(repo.requirements):
        ui.write(b"%s\n" % name)
3012 3014
3013 3015
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump mode: print one raw index row per revision and return early
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # running total of raw sizes
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # stored as a full snapshot: the delta base is itself
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the current set of head revisions incrementally
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l`
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (depth 0), not a delta
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta: extend the base revision's chain data
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the chunk encodes the compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # convert the "total" slots into averages in place
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string templates; width is filled in per maximum value below
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # return (value, percentage-of-total) for the percent format strings
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # render the per-chunk-type label, hex-escaping non-letter markers
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3368 3370
3369 3371
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only the two historical index layouts are supported
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full node ids; otherwise the short form is used
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of a rendered node id, taken from the first revision
        idlen = len(shortfn(r.node(i)))
        break

    # emit the column header matching the chosen format/verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3483 3485
3484 3486
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the parsing pipeline: each stage transforms the tree of the previous one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final optimization stage entirely
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stage trees to print: always, or only when they changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and the optimized tree and diff them
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style comparison of the two revision lists
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3616 3618
3617 3619
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    iolog = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            iolog = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            iolog = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        iolog = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=iolog)
    server.serve_forever()
3666 3668
3667 3669
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # resolve both revisions to nodes; rev2 defaults to the null revision
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3695 3697
3696 3698
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    picked = opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    if picked:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        # the positional FILE argument actually holds the revision here
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap to the underlying revlog when the storage object wraps one
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3723 3725
3724 3726
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    # chain building relies on the Windows certificate store APIs
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # resolve the target address, filling in the scheme's default port
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # no verification here: we only want the peer's raw certificate bytes
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building; only build the chain if incomplete
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3796 3798
3797 3799
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Reuse the log machinery for display; force a fresh (non-cached) bundle
    # fetch by clearing the bundle/force options.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do when the requested node is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # A bundle whose parent has been stripped cannot be opened;
            # report it and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly compute which changesets this bundle would add.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Stop at the first bundle containing the node.
                        break
            else:
                # Listing mode: header line with the bundle's mtime, then the
                # changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3938 3940
3939 3941
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the path, source and pinned revision of each subrepo of REV"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3951 3953
3952 3954
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the objects a debugging session
    # almost always needs.
    code.interact(local={'ui': ui, 'repo': repo})
3968 3970
3969 3971
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # Indent each set with a leading space and join the short
                # hashes of its members with single spaces.
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
4024 4026
4025 4027
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Three cache states: a recorded fnode (possibly pointing at a node
        # the .hgtags filelog does not know), no entry at all, or garbage.
        if fnode:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4044 4046
4045 4047
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev only makes sense inside a repository.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse each -D/--define into a template keyword; 'ui' is reserved
    # (it is provided as a template resource, not a keyword).
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the parsed tree, and the alias-expanded tree when template
        # aliases actually change it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once using the -D keyword definitions.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4109 4111
4110 4112
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (e.g. no interactive input); make that
    # visible in the echoed output.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4125 4127
4126 4128
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() returns so tests can observe it.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4139 4141
4140 4142
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while the caches
    # are regenerated.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4146 4148
4147 4149
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Delegate to the upgrade module. `optimize` (a repeatable --optimize
    # flag) is deduplicated into a set; the remaining **opts carry the
    # revlog selection flags (--changelog/--manifest/--filelogs).
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4197 4199
4198 4200
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # On Windows, honor ui.slash by normalizing the displayed relative path.
    normalize = ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    # Size the two columns to the longest repo-absolute and cwd-relative
    # paths. (Original used list comprehensions inside max() and shadowed
    # the builtin `abs`; generators and a neutral name avoid both.)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        rel = repo.pathto(path)
        if normalize:
            rel = util.normpath(rel)
        line = fmt % (path, rel, b'exact' if m.exact(path) else b'')
        ui.write(b"%s\n" % line.rstrip())
4225 4227
4226 4228
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)", joined
            # by spaces and followed by one trailing space.
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4244 4246
4245 4247
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise the wire-protocol argument passing of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote options are consumed by hg.peer(); strip them before
        # forwarding the rest as command arguments.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Only forward options that were actually set.
        args = {k: v for k, v in pycompat.iteritems(opts) if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4276 4278
4277 4279
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini-language into (action, lines) pairs.

    ``fh`` yields byte lines. An unindented line opens a new block and
    becomes the action; indented lines below it are its payload. A line
    indented deeper than its predecessor is treated as a continuation and
    concatenated (stripped) onto the previous payload line. Blank lines
    and ``#`` comments are ignored.
    """
    activeaction = None
    blocklines = []
    lastindent = 0

    for rawline in fh:
        rawline = rawline.rstrip()

        # Ignore blank lines and comments.
        if not rawline or rawline.startswith(b'#'):
            continue

        if not rawline.startswith(b' '):
            # Unindented line opens a new block; emit the finished one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = rawline
            blocklines = []
            lastindent = 0
            continue

        # From here on the line is indented and must belong to a block.
        if not activeaction:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(rawline) - len(rawline.lstrip())

        if indent > lastindent and blocklines:
            # Deeper indent continues the previous logical line.
            blocklines[-1] += rawline.lstrip()
        else:
            blocklines.append(rawline)
        lastindent = indent

    # Emit the trailing block, if any.
    if activeaction:
        yield activeaction, blocklines
4318 4320
4319 4321
4320 4322 @command(
4321 4323 b'debugwireproto',
4322 4324 [
4323 4325 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4324 4326 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4325 4327 (
4326 4328 b'',
4327 4329 b'noreadstderr',
4328 4330 False,
4329 4331 _(b'do not read from stderr of the remote'),
4330 4332 ),
4331 4333 (
4332 4334 b'',
4333 4335 b'nologhandshake',
4334 4336 False,
4335 4337 _(b'do not log I/O related to the peer handshake'),
4336 4338 ),
4337 4339 ]
4338 4340 + cmdutil.remoteopts,
4339 4341 _(b'[PATH]'),
4340 4342 optionalrepo=True,
4341 4343 )
4342 4344 def debugwireproto(ui, repo, path=None, **opts):
4343 4345 """send wire protocol commands to a server
4344 4346
4345 4347 This command can be used to issue wire protocol commands to remote
4346 4348 peers and to debug the raw data being exchanged.
4347 4349
4348 4350 ``--localssh`` will start an SSH server against the current repository
4349 4351 and connect to that. By default, the connection will perform a handshake
4350 4352 and establish an appropriate peer instance.
4351 4353
4352 4354 ``--peer`` can be used to bypass the handshake protocol and construct a
4353 4355 peer instance using the specified class type. Valid values are ``raw``,
4354 4356 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4355 4357 don't support higher-level command actions.
4356 4358
4357 4359 ``--noreadstderr`` can be used to disable automatic reading from stderr
4358 4360 of the peer (for SSH connections only). Disabling automatic reading of
4359 4361 stderr is useful for making output more deterministic.
4360 4362
4361 4363 Commands are issued via a mini language which is specified via stdin.
4362 4364 The language consists of individual actions to perform. An action is
4363 4365 defined by a block. A block is defined as a line with no leading
4364 4366 space followed by 0 or more lines with leading space. Blocks are
4365 4367 effectively a high-level command with additional metadata.
4366 4368
4367 4369 Lines beginning with ``#`` are ignored.
4368 4370
4369 4371 The following sections denote available actions.
4370 4372
4371 4373 raw
4372 4374 ---
4373 4375
4374 4376 Send raw data to the server.
4375 4377
4376 4378 The block payload contains the raw data to send as one atomic send
4377 4379 operation. The data may not actually be delivered in a single system
4378 4380 call: it depends on the abilities of the transport being used.
4379 4381
4380 4382 Each line in the block is de-indented and concatenated. Then, that
4381 4383 value is evaluated as a Python b'' literal. This allows the use of
4382 4384 backslash escaping, etc.
4383 4385
4384 4386 raw+
4385 4387 ----
4386 4388
4387 4389 Behaves like ``raw`` except flushes output afterwards.
4388 4390
4389 4391 command <X>
4390 4392 -----------
4391 4393
4392 4394 Send a request to run a named command, whose name follows the ``command``
4393 4395 string.
4394 4396
4395 4397 Arguments to the command are defined as lines in this block. The format of
4396 4398 each line is ``<key> <value>``. e.g.::
4397 4399
4398 4400 command listkeys
4399 4401 namespace bookmarks
4400 4402
4401 4403 If the value begins with ``eval:``, it will be interpreted as a Python
4402 4404 literal expression. Otherwise values are interpreted as Python b'' literals.
4403 4405 This allows sending complex types and encoding special byte sequences via
4404 4406 backslash escaping.
4405 4407
4406 4408 The following arguments have special meaning:
4407 4409
4408 4410 ``PUSHFILE``
4409 4411 When defined, the *push* mechanism of the peer will be used instead
4410 4412 of the static request-response mechanism and the content of the
4411 4413 file specified in the value of this argument will be sent as the
4412 4414 command payload.
4413 4415
4414 4416 This can be used to submit a local bundle file to the remote.
4415 4417
4416 4418 batchbegin
4417 4419 ----------
4418 4420
4419 4421 Instruct the peer to begin a batched send.
4420 4422
4421 4423 All ``command`` blocks are queued for execution until the next
4422 4424 ``batchsubmit`` block.
4423 4425
4424 4426 batchsubmit
4425 4427 -----------
4426 4428
4427 4429 Submit previously queued ``command`` blocks as a batch request.
4428 4430
4429 4431 This action MUST be paired with a ``batchbegin`` action.
4430 4432
4431 4433 httprequest <method> <path>
4432 4434 ---------------------------
4433 4435
4434 4436 (HTTP peer only)
4435 4437
4436 4438 Send an HTTP request to the peer.
4437 4439
4438 4440 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4439 4441
4440 4442 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4441 4443 headers to add to the request. e.g. ``Accept: foo``.
4442 4444
4443 4445 The following arguments are special:
4444 4446
4445 4447 ``BODYFILE``
4446 4448 The content of the file defined as the value to this argument will be
4447 4449 transferred verbatim as the HTTP request body.
4448 4450
4449 4451 ``frame <type> <flags> <payload>``
4450 4452 Send a unified protocol frame as part of the request body.
4451 4453
4452 4454 All frames will be collected and sent as the body to the HTTP
4453 4455 request.
4454 4456
4455 4457 close
4456 4458 -----
4457 4459
4458 4460 Close the connection to the server.
4459 4461
4460 4462 flush
4461 4463 -----
4462 4464
4463 4465 Flush data written to the server.
4464 4466
4465 4467 readavailable
4466 4468 -------------
4467 4469
4468 4470 Close the write end of the connection and read all available data from
4469 4471 the server.
4470 4472
4471 4473 If the connection to the server encompasses multiple pipes, we poll both
4472 4474 pipes and read available data.
4473 4475
4474 4476 readline
4475 4477 --------
4476 4478
4477 4479 Read a line of output from the server. If there are multiple output
4478 4480 pipes, reads only the main pipe.
4479 4481
4480 4482 ereadline
4481 4483 ---------
4482 4484
4483 4485 Like ``readline``, but read from the stderr pipe, if available.
4484 4486
4485 4487 read <X>
4486 4488 --------
4487 4489
4488 4490 ``read()`` N bytes from the server's main output pipe.
4489 4491
4490 4492 eread <X>
4491 4493 ---------
4492 4494
4493 4495 ``read()`` N bytes from the server's stderr pipe, if available.
4494 4496
4495 4497 Specifying Unified Frame-Based Protocol Frames
4496 4498 ----------------------------------------------
4497 4499
4498 4500 It is possible to emit a *Unified Frame-Based Protocol* by using special
4499 4501 syntax.
4500 4502
4501 4503 A frame is composed as a type, flags, and payload. These can be parsed
4502 4504 from a string of the form:
4503 4505
4504 4506 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4505 4507
4506 4508 ``request-id`` and ``stream-id`` are integers defining the request and
4507 4509 stream identifiers.
4508 4510
4509 4511 ``type`` can be an integer value for the frame type or the string name
4510 4512 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4511 4513 ``command-name``.
4512 4514
4513 4515 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4514 4516 components. Each component (and there can be just one) can be an integer
4515 4517 or a flag name for stream flags or frame flags, respectively. Values are
4516 4518 resolved to integers and then bitwise OR'd together.
4517 4519
4518 4520 ``payload`` represents the raw frame payload. If it begins with
4519 4521 ``cbor:``, the following string is evaluated as Python code and the
4520 4522 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4521 4523 as a Python byte string literal.
4522 4524 """
4523 4525 opts = pycompat.byteskwargs(opts)
4524 4526
4525 4527 if opts[b'localssh'] and not repo:
4526 4528 raise error.Abort(_(b'--localssh requires a repository'))
4527 4529
4528 4530 if opts[b'peer'] and opts[b'peer'] not in (
4529 4531 b'raw',
4530 4532 b'ssh1',
4531 4533 ):
4532 4534 raise error.Abort(
4533 4535 _(b'invalid value for --peer'),
4534 4536 hint=_(b'valid values are "raw" and "ssh1"'),
4535 4537 )
4536 4538
4537 4539 if path and opts[b'localssh']:
4538 4540 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4539 4541
4540 4542 if ui.interactive():
4541 4543 ui.write(_(b'(waiting for commands on stdin)\n'))
4542 4544
4543 4545 blocks = list(_parsewirelangblocks(ui.fin))
4544 4546
4545 4547 proc = None
4546 4548 stdin = None
4547 4549 stdout = None
4548 4550 stderr = None
4549 4551 opener = None
4550 4552
4551 4553 if opts[b'localssh']:
4552 4554 # We start the SSH server in its own process so there is process
4553 4555 # separation. This prevents a whole class of potential bugs around
4554 4556 # shared state from interfering with server operation.
4555 4557 args = procutil.hgcmd() + [
4556 4558 b'-R',
4557 4559 repo.root,
4558 4560 b'debugserve',
4559 4561 b'--sshstdio',
4560 4562 ]
4561 4563 proc = subprocess.Popen(
4562 4564 pycompat.rapply(procutil.tonativestr, args),
4563 4565 stdin=subprocess.PIPE,
4564 4566 stdout=subprocess.PIPE,
4565 4567 stderr=subprocess.PIPE,
4566 4568 bufsize=0,
4567 4569 )
4568 4570
4569 4571 stdin = proc.stdin
4570 4572 stdout = proc.stdout
4571 4573 stderr = proc.stderr
4572 4574
4573 4575 # We turn the pipes into observers so we can log I/O.
4574 4576 if ui.verbose or opts[b'peer'] == b'raw':
4575 4577 stdin = util.makeloggingfileobject(
4576 4578 ui, proc.stdin, b'i', logdata=True
4577 4579 )
4578 4580 stdout = util.makeloggingfileobject(
4579 4581 ui, proc.stdout, b'o', logdata=True
4580 4582 )
4581 4583 stderr = util.makeloggingfileobject(
4582 4584 ui, proc.stderr, b'e', logdata=True
4583 4585 )
4584 4586
4585 4587 # --localssh also implies the peer connection settings.
4586 4588
4587 4589 url = b'ssh://localserver'
4588 4590 autoreadstderr = not opts[b'noreadstderr']
4589 4591
4590 4592 if opts[b'peer'] == b'ssh1':
4591 4593 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4592 4594 peer = sshpeer.sshv1peer(
4593 4595 ui,
4594 4596 url,
4595 4597 proc,
4596 4598 stdin,
4597 4599 stdout,
4598 4600 stderr,
4599 4601 None,
4600 4602 autoreadstderr=autoreadstderr,
4601 4603 )
4602 4604 elif opts[b'peer'] == b'raw':
4603 4605 ui.write(_(b'using raw connection to peer\n'))
4604 4606 peer = None
4605 4607 else:
4606 4608 ui.write(_(b'creating ssh peer from handshake results\n'))
4607 4609 peer = sshpeer.makepeer(
4608 4610 ui,
4609 4611 url,
4610 4612 proc,
4611 4613 stdin,
4612 4614 stdout,
4613 4615 stderr,
4614 4616 autoreadstderr=autoreadstderr,
4615 4617 )
4616 4618
4617 4619 elif path:
4618 4620 # We bypass hg.peer() so we can proxy the sockets.
4619 4621 # TODO consider not doing this because we skip
4620 4622 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4621 4623 u = urlutil.url(path)
4622 4624 if u.scheme != b'http':
4623 4625 raise error.Abort(_(b'only http:// paths are currently supported'))
4624 4626
4625 4627 url, authinfo = u.authinfo()
4626 4628 openerargs = {
4627 4629 'useragent': b'Mercurial debugwireproto',
4628 4630 }
4629 4631
4630 4632 # Turn pipes/sockets into observers so we can log I/O.
4631 4633 if ui.verbose:
4632 4634 openerargs.update(
4633 4635 {
4634 4636 'loggingfh': ui,
4635 4637 'loggingname': b's',
4636 4638 'loggingopts': {
4637 4639 'logdata': True,
4638 4640 'logdataapis': False,
4639 4641 },
4640 4642 }
4641 4643 )
4642 4644
4643 4645 if ui.debugflag:
4644 4646 openerargs['loggingopts']['logdataapis'] = True
4645 4647
4646 4648 # Don't send default headers when in raw mode. This allows us to
4647 4649 # bypass most of the behavior of our URL handling code so we can
4648 4650 # have near complete control over what's sent on the wire.
4649 4651 if opts[b'peer'] == b'raw':
4650 4652 openerargs['sendaccept'] = False
4651 4653
4652 4654 opener = urlmod.opener(ui, authinfo, **openerargs)
4653 4655
4654 4656 if opts[b'peer'] == b'raw':
4655 4657 ui.write(_(b'using raw connection to peer\n'))
4656 4658 peer = None
4657 4659 elif opts[b'peer']:
4658 4660 raise error.Abort(
4659 4661 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4660 4662 )
4661 4663 else:
4662 4664 peer = httppeer.makepeer(ui, path, opener=opener)
4663 4665
4664 4666 # We /could/ populate stdin/stdout with sock.makefile()...
4665 4667 else:
4666 4668 raise error.Abort(_(b'unsupported connection configuration'))
4667 4669
4668 4670 batchedcommands = None
4669 4671
4670 4672 # Now perform actions based on the parsed wire language instructions.
4671 4673 for action, lines in blocks:
4672 4674 if action in (b'raw', b'raw+'):
4673 4675 if not stdin:
4674 4676 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4675 4677
4676 4678 # Concatenate the data together.
4677 4679 data = b''.join(l.lstrip() for l in lines)
4678 4680 data = stringutil.unescapestr(data)
4679 4681 stdin.write(data)
4680 4682
4681 4683 if action == b'raw+':
4682 4684 stdin.flush()
4683 4685 elif action == b'flush':
4684 4686 if not stdin:
4685 4687 raise error.Abort(_(b'cannot call flush on this peer'))
4686 4688 stdin.flush()
4687 4689 elif action.startswith(b'command'):
4688 4690 if not peer:
4689 4691 raise error.Abort(
4690 4692 _(
4691 4693 b'cannot send commands unless peer instance '
4692 4694 b'is available'
4693 4695 )
4694 4696 )
4695 4697
4696 4698 command = action.split(b' ', 1)[1]
4697 4699
4698 4700 args = {}
4699 4701 for line in lines:
4700 4702 # We need to allow empty values.
4701 4703 fields = line.lstrip().split(b' ', 1)
4702 4704 if len(fields) == 1:
4703 4705 key = fields[0]
4704 4706 value = b''
4705 4707 else:
4706 4708 key, value = fields
4707 4709
4708 4710 if value.startswith(b'eval:'):
4709 4711 value = stringutil.evalpythonliteral(value[5:])
4710 4712 else:
4711 4713 value = stringutil.unescapestr(value)
4712 4714
4713 4715 args[key] = value
4714 4716
4715 4717 if batchedcommands is not None:
4716 4718 batchedcommands.append((command, args))
4717 4719 continue
4718 4720
4719 4721 ui.status(_(b'sending %s command\n') % command)
4720 4722
4721 4723 if b'PUSHFILE' in args:
4722 4724 with open(args[b'PUSHFILE'], 'rb') as fh:
4723 4725 del args[b'PUSHFILE']
4724 4726 res, output = peer._callpush(
4725 4727 command, fh, **pycompat.strkwargs(args)
4726 4728 )
4727 4729 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4728 4730 ui.status(
4729 4731 _(b'remote output: %s\n') % stringutil.escapestr(output)
4730 4732 )
4731 4733 else:
4732 4734 with peer.commandexecutor() as e:
4733 4735 res = e.callcommand(command, args).result()
4734 4736
4735 4737 ui.status(
4736 4738 _(b'response: %s\n')
4737 4739 % stringutil.pprint(res, bprefix=True, indent=2)
4738 4740 )
4739 4741
4740 4742 elif action == b'batchbegin':
4741 4743 if batchedcommands is not None:
4742 4744 raise error.Abort(_(b'nested batchbegin not allowed'))
4743 4745
4744 4746 batchedcommands = []
4745 4747 elif action == b'batchsubmit':
4746 4748 # There is a batching API we could go through. But it would be
4747 4749 # difficult to normalize requests into function calls. It is easier
4748 4750 # to bypass this layer and normalize to commands + args.
4749 4751 ui.status(
4750 4752 _(b'sending batch with %d sub-commands\n')
4751 4753 % len(batchedcommands)
4752 4754 )
4753 4755 assert peer is not None
4754 4756 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4755 4757 ui.status(
4756 4758 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4757 4759 )
4758 4760
4759 4761 batchedcommands = None
4760 4762
4761 4763 elif action.startswith(b'httprequest '):
4762 4764 if not opener:
4763 4765 raise error.Abort(
4764 4766 _(b'cannot use httprequest without an HTTP peer')
4765 4767 )
4766 4768
4767 4769 request = action.split(b' ', 2)
4768 4770 if len(request) != 3:
4769 4771 raise error.Abort(
4770 4772 _(
4771 4773 b'invalid httprequest: expected format is '
4772 4774 b'"httprequest <method> <path>'
4773 4775 )
4774 4776 )
4775 4777
4776 4778 method, httppath = request[1:]
4777 4779 headers = {}
4778 4780 body = None
4779 4781 frames = []
4780 4782 for line in lines:
4781 4783 line = line.lstrip()
4782 4784 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4783 4785 if m:
4784 4786 # Headers need to use native strings.
4785 4787 key = pycompat.strurl(m.group(1))
4786 4788 value = pycompat.strurl(m.group(2))
4787 4789 headers[key] = value
4788 4790 continue
4789 4791
4790 4792 if line.startswith(b'BODYFILE '):
4791 4793 with open(line.split(b' ', 1), b'rb') as fh:
4792 4794 body = fh.read()
4793 4795 elif line.startswith(b'frame '):
4794 4796 frame = wireprotoframing.makeframefromhumanstring(
4795 4797 line[len(b'frame ') :]
4796 4798 )
4797 4799
4798 4800 frames.append(frame)
4799 4801 else:
4800 4802 raise error.Abort(
4801 4803 _(b'unknown argument to httprequest: %s') % line
4802 4804 )
4803 4805
4804 4806 url = path + httppath
4805 4807
4806 4808 if frames:
4807 4809 body = b''.join(bytes(f) for f in frames)
4808 4810
4809 4811 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4810 4812
4811 4813 # urllib.Request insists on using has_data() as a proxy for
4812 4814 # determining the request method. Override that to use our
4813 4815 # explicitly requested method.
4814 4816 req.get_method = lambda: pycompat.sysstr(method)
4815 4817
4816 4818 try:
4817 4819 res = opener.open(req)
4818 4820 body = res.read()
4819 4821 except util.urlerr.urlerror as e:
4820 4822 # read() method must be called, but only exists in Python 2
4821 4823 getattr(e, 'read', lambda: None)()
4822 4824 continue
4823 4825
4824 4826 ct = res.headers.get('Content-Type')
4825 4827 if ct == 'application/mercurial-cbor':
4826 4828 ui.write(
4827 4829 _(b'cbor> %s\n')
4828 4830 % stringutil.pprint(
4829 4831 cborutil.decodeall(body), bprefix=True, indent=2
4830 4832 )
4831 4833 )
4832 4834
4833 4835 elif action == b'close':
4834 4836 assert peer is not None
4835 4837 peer.close()
4836 4838 elif action == b'readavailable':
4837 4839 if not stdout or not stderr:
4838 4840 raise error.Abort(
4839 4841 _(b'readavailable not available on this peer')
4840 4842 )
4841 4843
4842 4844 stdin.close()
4843 4845 stdout.read()
4844 4846 stderr.read()
4845 4847
4846 4848 elif action == b'readline':
4847 4849 if not stdout:
4848 4850 raise error.Abort(_(b'readline not available on this peer'))
4849 4851 stdout.readline()
4850 4852 elif action == b'ereadline':
4851 4853 if not stderr:
4852 4854 raise error.Abort(_(b'ereadline not available on this peer'))
4853 4855 stderr.readline()
4854 4856 elif action.startswith(b'read '):
4855 4857 count = int(action.split(b' ', 1)[1])
4856 4858 if not stdout:
4857 4859 raise error.Abort(_(b'read not available on this peer'))
4858 4860 stdout.read(count)
4859 4861 elif action.startswith(b'eread '):
4860 4862 count = int(action.split(b' ', 1)[1])
4861 4863 if not stderr:
4862 4864 raise error.Abort(_(b'eread not available on this peer'))
4863 4865 stderr.read(count)
4864 4866 else:
4865 4867 raise error.Abort(_(b'unknown action: %s') % action)
4866 4868
4867 4869 if batchedcommands is not None:
4868 4870 raise error.Abort(_(b'unclosed "batchbegin" request'))
4869 4871
4870 4872 if peer:
4871 4873 peer.close()
4872 4874
4873 4875 if proc:
4874 4876 proc.kill()
@@ -1,525 +1,525 b''
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15 15
16 16 # mbp: "you know that thing where cvs gives you conflict markers?"
17 17 # s: "i hate that."
18 18
19 19 from __future__ import absolute_import
20 20
21 21 from .i18n import _
22 22 from . import (
23 23 error,
24 24 mdiff,
25 25 pycompat,
26 26 )
27 27 from .utils import stringutil
28 28
29 29
class CantReprocessAndShowBase(Exception):
    # Carried over from bzr's merge3 API; nothing in the visible module
    # raises it (the test module re-exports it for compatibility).
    pass
32 32
33 33
def intersect(ra, rb):
    """Given two ranges return the range where they intersect or None.

    >>> intersect((0, 10), (0, 6))
    (0, 6)
    >>> intersect((0, 10), (5, 15))
    (5, 10)
    >>> intersect((0, 10), (10, 15))
    >>> intersect((0, 9), (10, 15))
    >>> intersect((0, 9), (7, 15))
    (7, 9)
    """
    assert ra[0] <= ra[1]
    assert rb[0] <= rb[1]

    # The overlap, if any, is bounded by the larger start and smaller end.
    lo = max(ra[0], rb[0])
    hi = min(ra[1], rb[1])
    return (lo, hi) if lo < hi else None
55 55
56 56
def compare_range(a, astart, aend, b, bstart, bend):
    """Compare a[astart:aend] == b[bstart:bend], without slicing."""
    # Differently-sized regions can never be equal.
    if (aend - astart) != (bend - bstart):
        return False
    return all(
        a[ia] == b[ib]
        for ia, ib in zip(
            pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
        )
    )
68 68
69 69
class Merge3Text(object):
    """3-way merge of texts.

    Given strings BASE, OTHER, THIS, tries to produce a combined text
    incorporating the changes from both BASE->OTHER and BASE->THIS."""

    def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
        # The *text arguments are the raw byte strings; base/a/b optionally
        # supply pre-split line lists (callers must keep them consistent
        # with the corresponding texts) to avoid re-splitting.
        self.basetext = basetext
        self.atext = atext
        self.btext = btext
        if base is None:
            base = mdiff.splitnewlines(basetext)
        if a is None:
            a = mdiff.splitnewlines(atext)
        if b is None:
            b = mdiff.splitnewlines(btext)
        self.base = base
        self.a = a
        self.b = b

    def merge_groups(self):
        """Yield sequence of line groups.  Each one is a tuple:

        'unchanged', lines
             Lines unchanged from base

        'a', lines
             Lines taken from a

        'same', lines
             Lines taken from a (and equal to b)

        'b', lines
             Lines taken from b

        'conflict', (base_lines, a_lines, b_lines)
             Lines from base were changed to either a or b and conflict.
        """
        # Translate the index tuples from merge_regions() into actual
        # slices of the line lists.
        for t in self.merge_regions():
            what = t[0]
            if what == b'unchanged':
                yield what, self.base[t[1] : t[2]]
            elif what == b'a' or what == b'same':
                yield what, self.a[t[1] : t[2]]
            elif what == b'b':
                yield what, self.b[t[1] : t[2]]
            elif what == b'conflict':
                yield (
                    what,
                    (
                        self.base[t[1] : t[2]],
                        self.a[t[3] : t[4]],
                        self.b[t[5] : t[6]],
                    ),
                )
            else:
                raise ValueError(what)

    def merge_regions(self):
        """Return sequences of matching and conflicting regions.

        This returns tuples, where the first value says what kind we
        have:

        'unchanged', start, end
             Take a region of base[start:end]

        'same', astart, aend
             b and a are different from base but give the same result

        'a', start, end
             Non-clashing insertion from a[start:end]

        'conflict', zstart, zend, astart, aend, bstart, bend
             Conflict between a and b, with z as common ancestor

        Method is as follows:

        The two sequences align only on regions which match the base
        and both descendants.  These are found by doing a two-way diff
        of each one against the base, and then finding the
        intersections between those regions.  These "sync regions"
        are by definition unchanged in both and easily dealt with.

        The regions in between can be in any of three cases:
        conflicted, or changed on only one side.
        """

        # section a[0:ia] has been disposed of, etc
        # iz/ia/ib are cursors into base/a/b respectively.
        iz = ia = ib = 0

        for region in self.find_sync_regions():
            zmatch, zend, amatch, aend, bmatch, bend = region
            # print 'match base [%d:%d]' % (zmatch, zend)

            matchlen = zend - zmatch
            assert matchlen >= 0
            assert matchlen == (aend - amatch)
            assert matchlen == (bend - bmatch)

            # Lengths of the unmatched gaps between the previous sync
            # region and this one, on each of the three sides.
            len_a = amatch - ia
            len_b = bmatch - ib
            len_base = zmatch - iz
            assert len_a >= 0
            assert len_b >= 0
            assert len_base >= 0

            # print 'unmatched a=%d, b=%d' % (len_a, len_b)

            if len_a or len_b:
                # try to avoid actually slicing the lists
                equal_a = compare_range(
                    self.a, ia, amatch, self.base, iz, zmatch
                )
                equal_b = compare_range(
                    self.b, ib, bmatch, self.base, iz, zmatch
                )
                same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)

                if same:
                    yield b'same', ia, amatch
                elif equal_a and not equal_b:
                    # only b changed relative to base: take b
                    yield b'b', ib, bmatch
                elif equal_b and not equal_a:
                    # only a changed relative to base: take a
                    yield b'a', ia, amatch
                elif not equal_a and not equal_b:
                    yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
                else:
                    raise AssertionError(b"can't handle a=b=base but unmatched")

            ia = amatch
            ib = bmatch
            iz = zmatch

            # if the same part of the base was deleted on both sides
            # that's OK, we can just skip it.

            if matchlen > 0:
                assert ia == amatch
                assert ib == bmatch
                assert iz == zmatch

                yield b'unchanged', zmatch, zend
                iz = zend
                ia = aend
                ib = bend

    def minimize(self, merge_groups):
        """Trim conflict regions of lines where A and B sides match.

        Lines where both A and B have made the same changes at the beginning
        or the end of each merge region are eliminated from the conflict
        region and are instead considered the same.
        """
        for what, lines in merge_groups:
            if what != b"conflict":
                yield what, lines
                continue
            base_lines, a_lines, b_lines = lines
            alen = len(a_lines)
            blen = len(b_lines)

            # find matches at the front
            ii = 0
            while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
                ii += 1
            startmatches = ii

            # find matches at the end
            ii = 0
            while (
                ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
            ):
                ii += 1
            endmatches = ii

            if startmatches > 0:
                yield b'same', a_lines[:startmatches]

            # The shrunken conflict keeps only the truly divergent middle.
            yield (
                b'conflict',
                (
                    base_lines,
                    a_lines[startmatches : alen - endmatches],
                    b_lines[startmatches : blen - endmatches],
                ),
            )

            if endmatches > 0:
                yield b'same', a_lines[alen - endmatches :]

    def find_sync_regions(self):
        """Return a list of sync regions, where both descendants match the base.

        Generates a list of (base1, base2, a1, a2, b1, b2).  There is
        always a zero-length sync region at the end of all the files.
        """

        ia = ib = 0
        amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
        bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
        len_a = len(amatches)
        len_b = len(bmatches)

        sl = []

        while ia < len_a and ib < len_b:
            abase, amatch, alen = amatches[ia]
            bbase, bmatch, blen = bmatches[ib]

            # there is an unconflicted block at i; how long does it
            # extend?  until whichever one ends earlier.
            i = intersect((abase, abase + alen), (bbase, bbase + blen))
            if i:
                intbase = i[0]
                intend = i[1]
                intlen = intend - intbase

                # found a match of base[i[0], i[1]]; this may be less than
                # the region that matches in either one
                assert intlen <= alen
                assert intlen <= blen
                assert abase <= intbase
                assert bbase <= intbase

                # Map the base interval into a and b coordinates.
                asub = amatch + (intbase - abase)
                bsub = bmatch + (intbase - bbase)
                aend = asub + intlen
                bend = bsub + intlen

                assert self.base[intbase:intend] == self.a[asub:aend], (
                    self.base[intbase:intend],
                    self.a[asub:aend],
                )

                assert self.base[intbase:intend] == self.b[bsub:bend]

                sl.append((intbase, intend, asub, aend, bsub, bend))

            # advance whichever one ends first in the base text
            if (abase + alen) < (bbase + blen):
                ia += 1
            else:
                ib += 1

        # Terminating zero-length sentinel region at end-of-file.
        intbase = len(self.base)
        abase = len(self.a)
        bbase = len(self.b)
        sl.append((intbase, intbase, abase, abase, bbase, bbase))

        return sl
321 321
322 322
def _verifytext(text, path, ui, opts):
    """verifies that text is non-binary (unless opts[text] is passed,
    then we just warn)"""
    # Fast path: ordinary text passes straight through.
    if not stringutil.binary(text):
        return text
    msg = _(b"%s looks like a binary file.") % path
    if not opts.get('quiet'):
        ui.warn(_(b'warning: %s\n') % msg)
    if not opts.get('text'):
        raise error.Abort(msg)
    return text
333 333
334 334
335 335 def _picklabels(defaults, overrides):
336 336 if len(overrides) > 3:
337 337 raise error.Abort(_(b"can only specify three labels."))
338 338 result = defaults[:]
339 339 for i, override in enumerate(overrides):
340 340 result[i] = override
341 341 return result
342 342
343 343
def render_markers(
    m3,
    name_a=None,
    name_b=None,
    name_base=None,
    start_marker=b'<<<<<<<',
    mid_marker=b'=======',
    end_marker=b'>>>>>>>',
    base_marker=None,
    minimize=False,
):
    """Return merge in cvs-like form.

    Returns a (lines, conflicts) pair: the rendered output lines and a
    boolean telling whether any conflict region was emitted.
    """
    # Follow the line-ending convention of the first line of side `a`.
    newline = b'\n'
    if m3.a:
        if m3.a[0].endswith(b'\r\n'):
            newline = b'\r\n'
        elif m3.a[0].endswith(b'\r'):
            newline = b'\r'
    # Attach the side names to their markers when both are provided.
    if name_a and start_marker:
        start_marker += b' ' + name_a
    if name_b and end_marker:
        end_marker += b' ' + name_b
    if name_base and base_marker:
        base_marker += b' ' + name_base
    groups = m3.merge_groups()
    if minimize:
        groups = m3.minimize(groups)
    lines = []
    conflicts = False
    for what, group_lines in groups:
        if what != b'conflict':
            lines.extend(group_lines)
            continue
        conflicts = True
        base_lines, a_lines, b_lines = group_lines
        if start_marker is not None:
            lines.append(start_marker + newline)
        lines.extend(a_lines)
        # Base lines are only rendered in merge3 style (base_marker set).
        if base_marker is not None:
            lines.append(base_marker + newline)
            lines.extend(base_lines)
        if mid_marker is not None:
            lines.append(mid_marker + newline)
        lines.extend(b_lines)
        if end_marker is not None:
            lines.append(end_marker + newline)
    return lines, conflicts
391 391
392 392
def render_mergediff(m3, name_a, name_b, name_base):
    """Render conflicts in the 'mergediff' style.

    For each conflict, the side that changed more relative to the base is
    shown verbatim, while the less-changed side is rendered as a compact
    diff against the base.  Returns a (lines, conflicts) pair like
    render_markers().
    """
    lines = []
    conflicts = False
    for what, group_lines in m3.merge_groups():
        if what == b'conflict':
            base_lines, a_lines, b_lines = group_lines
            base_text = b''.join(base_lines)
            b_blocks = list(
                mdiff.allblocks(
                    base_text,
                    b''.join(b_lines),
                    lines1=base_lines,
                    lines2=b_lines,
                )
            )
            a_blocks = list(
                mdiff.allblocks(
                    base_text,
                    b''.join(a_lines),
                    lines1=base_lines,
                    # Fixed copy-paste error: this used to pass
                    # lines2=b_lines, which did not correspond to the
                    # a-side text given as the second positional argument.
                    lines2=a_lines,
                )
            )

            def matching_lines(blocks):
                # Count lines the side shares with the base ('=' blocks).
                return sum(
                    block[1] - block[0]
                    for block, kind in blocks
                    if kind == b'='
                )

            def diff_lines(blocks, lines1, lines2):
                # Yield diff-style output: ' ' context, '-' removed from
                # base, '+' added by the side.
                for block, kind in blocks:
                    if kind == b'=':
                        for line in lines1[block[0] : block[1]]:
                            yield b' ' + line
                    else:
                        for line in lines1[block[0] : block[1]]:
                            yield b'-' + line
                        for line in lines2[block[2] : block[3]]:
                            yield b'+' + line

            lines.append(b"<<<<<<<\n")
            if matching_lines(a_blocks) < matching_lines(b_blocks):
                # a diverged more from base: show a verbatim, b as a diff.
                lines.append(b"======= %s\n" % name_a)
                lines.extend(a_lines)
                lines.append(b"------- %s\n" % name_base)
                lines.append(b"+++++++ %s\n" % name_b)
                lines.extend(diff_lines(b_blocks, base_lines, b_lines))
            else:
                # b diverged at least as much: show a as a diff, b verbatim.
                lines.append(b"------- %s\n" % name_base)
                lines.append(b"+++++++ %s\n" % name_a)
                lines.extend(diff_lines(a_blocks, base_lines, a_lines))
                lines.append(b"======= %s\n" % name_b)
                lines.extend(b_lines)
            lines.append(b">>>>>>>\n")
            conflicts = True
        else:
            lines.extend(group_lines)
    return lines, conflicts
453 453
454 454
455 455 def _resolve(m3, sides):
456 456 lines = []
457 457 for what, group_lines in m3.merge_groups():
458 458 if what == b'conflict':
459 459 for side in sides:
460 460 lines.extend(group_lines[side])
461 461 else:
462 462 lines.extend(group_lines)
463 463 return lines
464 464
465 465
def simplemerge(ui, localctx, basectx, otherctx, **opts):
    """Performs the simplemerge algorithm.

    The merged result is written into `localctx`.  Returns 1 when the
    inputs are unmergeable or conflicts remain, None otherwise.
    """

    def readctx(ctx):
        # Merges were always run in the working copy before, which means
        # they used decoded data, if the user defined any repository
        # filters.
        #
        # Maintain that behavior today for BC, though perhaps in the future
        # it'd be worth considering whether merging encoded data (what the
        # repository usually sees) might be more useful.
        return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)

    try:
        localtext = readctx(localctx)
        basetext = readctx(basectx)
        othertext = readctx(otherctx)
    except error.Abort:
        return 1

    m3 = Merge3Text(basetext, localtext, othertext)
    conflicts = False
    mode = opts.get('mode', b'merge')
    # The one-sided/union modes simply pick conflict sides; the marker
    # modes render conflicts into the output text.
    side_picks = {b'union': (1, 2), b'local': (1,), b'other': (2,)}
    if mode in side_picks:
        lines = _resolve(m3, side_picks[mode])
    else:
        name_a, name_b, name_base = _picklabels(
            [localctx.path(), otherctx.path(), None], opts.get('label', [])
        )
        if mode == b'mergediff':
            lines, conflicts = render_mergediff(m3, name_a, name_b, name_base)
        else:
            extrakwargs = {'minimize': True}
            if mode == b'merge3':
                extrakwargs['base_marker'] = b'|||||||'
                extrakwargs['name_base'] = name_base
                extrakwargs['minimize'] = False
            lines, conflicts = render_markers(
                m3, name_a=name_a, name_b=name_b, **extrakwargs
            )

    mergedtext = b''.join(lines)
    if opts.get('print'):
        ui.fout.write(mergedtext)
    else:
        # localctx.flags() already has the merged flags (done in
        # mergestate.resolve())
        localctx.write(mergedtext, localctx.flags())

    if conflicts:
        return 1
@@ -1,395 +1,397 b''
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15 15
16 16 from __future__ import absolute_import
17 17
18 18 import unittest
19 19 from mercurial import (
20 20 error,
21 21 simplemerge,
22 22 util,
23 23 )
24 24
25 25 from mercurial.utils import stringutil
26 26
27 27 TestCase = unittest.TestCase
28 28 # bzr compatible interface, for the tests
class Merge3(simplemerge.Merge3Text):
    """3-way merge of texts.

    Given BASE, OTHER, THIS, tries to produce a combined text
    incorporating the changes from both BASE->OTHER and BASE->THIS.
    All three will typically be sequences of lines."""

    def __init__(self, base, a, b):
        # Normalize each input line to exactly one trailing newline and
        # join into a full text for Merge3Text.
        def _jointext(lines):
            return b'\n'.join([l.strip(b'\n') for l in lines] + [b''])

        basetext = _jointext(base)
        atext = _jointext(a)
        btext = _jointext(b)
        if any(stringutil.binary(t) for t in (basetext, atext, btext)):
            raise error.Abort(b"don't know how to merge binary files")
        simplemerge.Merge3Text.__init__(
            self, basetext, atext, btext, base, a, b
        )
49 49
50 50
51 51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
52 52
53 53
def split_lines(t):
    """Split byte string *t* into a list of lines, keeping line endings."""
    return util.stringio(t).readlines()
56 56
57 57
58 58 ############################################################
59 59 # test case data from the gnu diffutils manual
60 60 # common base
61 61 TZU = split_lines(
62 62 b""" The Nameless is the origin of Heaven and Earth;
63 63 The named is the mother of all things.
64 64
65 65 Therefore let there always be non-being,
66 66 so we may see their subtlety,
67 67 And let there always be being,
68 68 so we may see their outcome.
69 69 The two are the same,
70 70 But after they are produced,
71 71 they have different names.
72 72 They both may be called deep and profound.
73 73 Deeper and more profound,
74 74 The door of all subtleties!
75 75 """
76 76 )
77 77
78 78 LAO = split_lines(
79 79 b""" The Way that can be told of is not the eternal Way;
80 80 The name that can be named is not the eternal name.
81 81 The Nameless is the origin of Heaven and Earth;
82 82 The Named is the mother of all things.
83 83 Therefore let there always be non-being,
84 84 so we may see their subtlety,
85 85 And let there always be being,
86 86 so we may see their outcome.
87 87 The two are the same,
88 88 But after they are produced,
89 89 they have different names.
90 90 """
91 91 )
92 92
93 93
94 94 TAO = split_lines(
95 95 b""" The Way that can be told of is not the eternal Way;
96 96 The name that can be named is not the eternal name.
97 97 The Nameless is the origin of Heaven and Earth;
98 98 The named is the mother of all things.
99 99
100 100 Therefore let there always be non-being,
101 101 so we may see their subtlety,
102 102 And let there always be being,
103 103 so we may see their result.
104 104 The two are the same,
105 105 But after they are produced,
106 106 they have different names.
107 107
108 108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
109 109
110 110 """
111 111 )
112 112
113 113 MERGED_RESULT = split_lines(
114 114 b"""\
115 115 The Way that can be told of is not the eternal Way;
116 116 The name that can be named is not the eternal name.
117 117 The Nameless is the origin of Heaven and Earth;
118 118 The Named is the mother of all things.
119 119 Therefore let there always be non-being,
120 120 so we may see their subtlety,
121 121 And let there always be being,
122 122 so we may see their result.
123 123 The two are the same,
124 124 But after they are produced,
125 125 they have different names.\
126 126 \n<<<<<<< LAO\
127 127 \n=======
128 128
129 129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
130 130 \
131 131 \n>>>>>>> TAO
132 132 """
133 133 )
134 134
135 135
136 136 class TestMerge3(TestCase):
    def log(self, msg):
        # Stub apparently kept for bzr test-API compatibility; messages
        # are deliberately discarded.
        pass
139 139
    def test_no_changes(self):
        """No conflicts because nothing changed"""
        m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])

        # One full-width sync region plus the zero-length sentinel at EOF.
        self.assertEqual(
            list(m3.find_sync_regions()),
            [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
        )

        self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])

        self.assertEqual(
            list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
        )
154 154
    def test_front_insert(self):
        # Side a inserts two lines before the shared b'zz' line; no conflict.
        m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])

        # todo: should use a sentinel at end as from get_matching_blocks
        # to match without zz
        self.assertEqual(
            list(m3.find_sync_regions()),
            [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
        )

        self.assertEqual(
            list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
        )

        self.assertEqual(
            list(m3.merge_groups()),
            [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
        )
173 173
    def test_null_insert(self):
        # Insertion by side a into entirely empty base/other; no conflict.
        m3 = Merge3([], [b'aaa', b'bbb'], [])
        # todo: should use a sentinel at end as from get_matching_blocks
        # to match without zz
        self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])

        self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])

        self.assertEqual(
            simplemerge.render_markers(m3), ([b'aaa', b'bbb'], False)
        )
183 185
    def test_no_conflicts(self):
        """No conflicts because only one side changed"""
        m3 = Merge3(
            [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
        )

        self.assertEqual(
            list(m3.find_sync_regions()),
            [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
        )

        # The middle insertion is attributed to side a only.
        self.assertEqual(
            list(m3.merge_regions()),
            [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
        )
199 201
    def test_append_a(self):
        # Only side a appends a line; the merge takes it without markers.
        m3 = Merge3(
            [b'aaa\n', b'bbb\n'],
            [b'aaa\n', b'bbb\n', b'222\n'],
            [b'aaa\n', b'bbb\n'],
        )

        self.assertEqual(
            b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
        )
210 212
    def test_append_b(self):
        # Only side b appends a line; the merge takes it without markers.
        m3 = Merge3(
            [b'aaa\n', b'bbb\n'],
            [b'aaa\n', b'bbb\n'],
            [b'aaa\n', b'bbb\n', b'222\n'],
        )

        self.assertEqual(
            b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
        )
221 223
    def test_append_agreement(self):
        # Both sides append the same line; treated as 'same', no conflict.
        m3 = Merge3(
            [b'aaa\n', b'bbb\n'],
            [b'aaa\n', b'bbb\n', b'222\n'],
            [b'aaa\n', b'bbb\n', b'222\n'],
        )

        self.assertEqual(
            b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
        )
232 234
    def test_append_clash(self):
        # Both sides append different lines: expect conflict markers.
        m3 = Merge3(
            [b'aaa\n', b'bbb\n'],
            [b'aaa\n', b'bbb\n', b'222\n'],
            [b'aaa\n', b'bbb\n', b'333\n'],
        )

        ml, conflicts = simplemerge.render_markers(
            m3,
            name_a=b'a',
            name_b=b'b',
            start_marker=b'<<',
            mid_marker=b'--',
            end_marker=b'>>',
        )
        self.assertEqual(
            b''.join(ml),
            b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
        )
252 254
    def test_insert_agreement(self):
        # Both sides insert the identical line: merged cleanly, no markers.
        m3 = Merge3(
            [b'aaa\n', b'bbb\n'],
            [b'aaa\n', b'222\n', b'bbb\n'],
            [b'aaa\n', b'222\n', b'bbb\n'],
        )

        ml, conflicts = simplemerge.render_markers(
            m3,
            name_a=b'a',
            name_b=b'b',
            start_marker=b'<<',
            mid_marker=b'--',
            end_marker=b'>>',
        )
        self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
269 271
270 272 def test_insert_clash(self):
271 273 """Both try to insert lines in the same place."""
272 274 m3 = Merge3(
273 275 [b'aaa\n', b'bbb\n'],
274 276 [b'aaa\n', b'111\n', b'bbb\n'],
275 277 [b'aaa\n', b'222\n', b'bbb\n'],
276 278 )
277 279
278 280 self.assertEqual(
279 281 list(m3.find_sync_regions()),
280 282 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
281 283 )
282 284
283 285 self.assertEqual(
284 286 list(m3.merge_regions()),
285 287 [
286 288 (b'unchanged', 0, 1),
287 289 (b'conflict', 1, 1, 1, 2, 1, 2),
288 290 (b'unchanged', 1, 2),
289 291 ],
290 292 )
291 293
292 294 self.assertEqual(
293 295 list(m3.merge_groups()),
294 296 [
295 297 (b'unchanged', [b'aaa\n']),
296 298 (b'conflict', ([], [b'111\n'], [b'222\n'])),
297 299 (b'unchanged', [b'bbb\n']),
298 300 ],
299 301 )
300 302
301 ml, conflicts = simplemerge.merge_lines(
303 ml, conflicts = simplemerge.render_markers(
302 304 m3,
303 305 name_a=b'a',
304 306 name_b=b'b',
305 307 start_marker=b'<<',
306 308 mid_marker=b'--',
307 309 end_marker=b'>>',
308 310 )
309 311 self.assertEqual(
310 312 b''.join(ml),
311 313 b'''aaa
312 314 << a
313 315 111
314 316 --
315 317 222
316 318 >> b
317 319 bbb
318 320 ''',
319 321 )
320 322
321 323 def test_replace_clash(self):
322 324 """Both try to insert lines in the same place."""
323 325 m3 = Merge3(
324 326 [b'aaa', b'000', b'bbb'],
325 327 [b'aaa', b'111', b'bbb'],
326 328 [b'aaa', b'222', b'bbb'],
327 329 )
328 330
329 331 self.assertEqual(
330 332 list(m3.find_sync_regions()),
331 333 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
332 334 )
333 335
334 336 def test_replace_multi(self):
335 337 """Replacement with regions of different size."""
336 338 m3 = Merge3(
337 339 [b'aaa', b'000', b'000', b'bbb'],
338 340 [b'aaa', b'111', b'111', b'111', b'bbb'],
339 341 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
340 342 )
341 343
342 344 self.assertEqual(
343 345 list(m3.find_sync_regions()),
344 346 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
345 347 )
346 348
347 349 def test_merge_poem(self):
348 350 """Test case from diff3 manual"""
349 351 m3 = Merge3(TZU, LAO, TAO)
350 ml, conflicts = simplemerge.merge_lines(m3, b'LAO', b'TAO')
352 ml, conflicts = simplemerge.render_markers(m3, b'LAO', b'TAO')
351 353 self.log(b'merge result:')
352 354 self.log(b''.join(ml))
353 355 self.assertEqual(ml, MERGED_RESULT)
354 356
355 357 def test_binary(self):
356 358 with self.assertRaises(error.Abort):
357 359 Merge3([b'\x00'], [b'a'], [b'b'])
358 360
359 361 def test_dos_text(self):
360 362 base_text = b'a\r\n'
361 363 this_text = b'b\r\n'
362 364 other_text = b'c\r\n'
363 365 m3 = Merge3(
364 366 base_text.splitlines(True),
365 367 other_text.splitlines(True),
366 368 this_text.splitlines(True),
367 369 )
368 m_lines, conflicts = simplemerge.merge_lines(m3, b'OTHER', b'THIS')
370 m_lines, conflicts = simplemerge.render_markers(m3, b'OTHER', b'THIS')
369 371 self.assertEqual(
370 372 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
371 373 b'>>>>>>> THIS\r\n'.splitlines(True),
372 374 m_lines,
373 375 )
374 376
375 377 def test_mac_text(self):
376 378 base_text = b'a\r'
377 379 this_text = b'b\r'
378 380 other_text = b'c\r'
379 381 m3 = Merge3(
380 382 base_text.splitlines(True),
381 383 other_text.splitlines(True),
382 384 this_text.splitlines(True),
383 385 )
384 m_lines, conflicts = simplemerge.merge_lines(m3, b'OTHER', b'THIS')
386 m_lines, conflicts = simplemerge.render_markers(m3, b'OTHER', b'THIS')
385 387 self.assertEqual(
386 388 b'<<<<<<< OTHER\rc\r=======\rb\r'
387 389 b'>>>>>>> THIS\r'.splitlines(True),
388 390 m_lines,
389 391 )
390 392
391 393
392 394 if __name__ == '__main__':
393 395 import silenttestrunner
394 396
395 397 silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now