##// END OF EJS Templates
simplemerge: make merge_lines() a free function...
Martin von Zweigbergk -
r49406:c6649c53 default
parent child Browse files
Show More
@@ -1,4874 +1,4874 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 deltas as deltautil,
107 107 nodemap,
108 108 rewrite,
109 109 sidedata,
110 110 )
111 111
112 112 release = lockmod.release
113 113
114 114 table = {}
115 115 table.update(strip.command._table)
116 116 command = registrar.command(table)
117 117
118 118
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it as a standalone revlog
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the local repository
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
138 138
139 139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    armored = (
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(util.b85decode(armored))
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155 155
156 156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
163 163
164 164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # first pass over the DAG text: count nodes so the progress bar has a
    # total before we start committing
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        # merge_lines() is a free function taking the
                        # Merge3Text object (the bound-method form was
                        # removed; see the simplemerge module)
                        ml = [l.strip() for l in simplemerge.merge_lines(m3)[0]]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag the line belonging to this rev so every rev
                    # modifies the mergeable file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry the per-rev files of the second parent
                        # forward through the merge
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
340 340
341 341
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup 'gen' to the ui.

    With all=True, every delta of the changelog, manifest and filelog
    sections is listed; otherwise only the changelog node hashes are
    printed.  'indent' prefixes each output line (used when nested inside
    a bundle2 dump).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # one section header, then one line per delta chunk
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # the changegroup stream is ordered: changelog, manifest, then
        # filelog sections until an empty header terminates them
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        # bundle2 payloads carry their own framing; refuse them here
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
381 381
382 382
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report what we can about a marker format we cannot decode
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        m = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, m)
    fm.end()
405 405
406 406
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one head per line"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
415 415
416 416
def _quasirepr(thing):
    """repr-like byte string with deterministic (sorted-key) dict rendering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
423 423
424 424
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    selected = opts.get('part_type', [])
    for part in gen.iterparts():
        # --part-type restricts output to the named part types
        if selected and part.type not in selected:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
447 447
448 448
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; do not unpack the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
471 471
472 472
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always tear the connection down, even if listing failed
        peer.close()
492 492
493 493
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the sidedata block recorded for this revision, if any
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return
    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # first matching category wins, in this priority order
        for action, members in (
            (b"added", files.added),
            (b"removed", files.removed),
            (b"merged", files.merged),
            (b"salvaged", files.salvaged),
        ):
            if f in members:
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
543 543
544 544
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        # each err is (format string, *args)
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
558 558
559 559
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
572 572
573 573
def _debugdisplaycolor(ui):
    """print every known color label, each rendered in its own color"""
    # work on a copy so we can replace the style table without side effects
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # sort labels containing '_' after the others so '_background' entries
    # are grouped together
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
590 590
591 591
def _debugdisplaystyle(ui):
    """print each configured style label rendered with its own effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad to the widest label so the effect columns line up
    width = max(map(len, ui._styles))
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
605 605
606 606
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
628 628
629 629
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone index file: walk its revlog, labeling any listed revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yields ('n', (rev, parents)) node events and
            # ('l', (rev, label)) label events for dagtextlines()
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names so tags become labels below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an ('a', branchname) annotation whenever the
                    # branch changes between consecutive revisions
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
699 699
700 700
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wholestore = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wholestore:
        # -c/-m/--dir: the single positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
716 716
717 717
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
736 736
737 737
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        # (hence the 'p1'/'p2' delta types below)
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either the rev itself
            # (full snapshot) or the previous revision
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # total compressed size of every revision in this delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains sharing a base get the same id, numbered in discovery order
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate how sparse read would slice this chain into hunks
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
918 918
919 919
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # deprecated --nodates still wins over the default --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort primarily by mtime, with filename as tie-breaker
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an unset mtime
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # this bit distinguishes a symlink from a regular file mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
974 974
975 975
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records the ignore-pattern hash; with v1 the command
    # prints nothing.  (The old docstring said "nothing for dirstate-v2",
    # contradicting the check below.)
    if not repo.dirstate._use_dirstate_v2:
        return
    docket = repo.dirstate._map.docket
    hash_len = 20  # 160 bits for SHA-1
    hash_bytes = docket.tree_metadata[-hash_len:]
    ui.write(binascii.hexlify(hash_bytes) + b'\n')
990 990
991 991
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to the real (configured) remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # simulate a remote by filtering the requested revisions out of a
        # local peer through a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # same trick on the local side: hide the requested revisions
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information filled in by the discovery code
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            # strict output formats (e.g. json) must not be polluted by the
            # discovery chatter, so buffer it and report it as a field
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1245 1245
1246 1246
# read/write buffer size (4 KiB) used by debugdownload below
_chunksize = 4 << 10
1248 1248
1249 1249
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # without --output, chunks go straight to the ui object
    dest = open(output, b"wb", _chunksize) if output else ui
    try:
        while True:
            data = fh.read(_chunksize)
            if not data:
                break
            dest.write(data)
    finally:
        # only close what we opened ourselves
        if output:
            dest.close()
1272 1272
1273 1273
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # extensions are listed sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; modules in an oxidized build have no
        # __file__, so fall back to the executable path there
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # terse (quiet) and full (verbose) modes print the bare name; the
        # default mode appends a compatibility annotation on the same line
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # show the most recent version the extension was tested with
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1335 1335
1336 1336
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # local import to avoid a module-level import cycle
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # processing pipeline: each stage transforms the tree produced by the
    # previous one and can be dumped individually via --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage label is omitted in the legacy --verbose-only mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working-directory files (tracked, unknown and ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally, print every candidate the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1432 1432
1433 1433
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (building a report) cannot be combined with consuming one
    # or with a dry run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair work lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1506 1506
1507 1507
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name, but at least the header width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # format string that pads the name out to the common column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes pass through unchanged; other values render as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so mismatches against config/default can be
        # highlighted by the color extension
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1578 1578
1579 1579
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean filesystem probe the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; if we cannot create
    # one (e.g. read-only directory) report "(unknown)"
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1602 1602
1603 1603
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # assemble the getbundle() keyword arguments from the command line
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name to an internal bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1650 1650
1651 1651
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file can be ignored directly, or because one of its
                # parent directories is ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # also report which ignore-file rule triggered the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1700 1700
1701 1701
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # width of the node columns: measure one rendered node when the store
    # has any revision, otherwise fall back to the short-hash width
    idlen = 12
    first_rev = next(iter(store), None)
    if first_rev is not None:
        idlen = len(shortfn(store.node(first_rev)))

    fm = ui.formatter(b'debugindex', opts)
    header = b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1741 1741
1742 1742
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # always emit the first-parent edge; the second parent is only
        # emitted when it is not the null node
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1761 1761
1762 1762
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): this shortest() call appears to exist only to exercise
    # the index before reading its stats — confirm before removing
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    # print the stats in a stable, sorted order
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1772 1772
1773 1773
1774 1774 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1775 1775 def debuginstall(ui, **opts):
1776 1776 """test Mercurial installation
1777 1777
1778 1778 Returns 0 on success.
1779 1779 """
1780 1780 opts = pycompat.byteskwargs(opts)
1781 1781
1782 1782 problems = 0
1783 1783
1784 1784 fm = ui.formatter(b'debuginstall', opts)
1785 1785 fm.startitem()
1786 1786
1787 1787 # encoding might be unknown or wrong. don't translate these messages.
1788 1788 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1789 1789 err = None
1790 1790 try:
1791 1791 codecs.lookup(pycompat.sysstr(encoding.encoding))
1792 1792 except LookupError as inst:
1793 1793 err = stringutil.forcebytestr(inst)
1794 1794 problems += 1
1795 1795 fm.condwrite(
1796 1796 err,
1797 1797 b'encodingerror',
1798 1798 b" %s\n (check that your locale is properly set)\n",
1799 1799 err,
1800 1800 )
1801 1801
1802 1802 # Python
1803 1803 pythonlib = None
1804 1804 if util.safehasattr(os, '__file__'):
1805 1805 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1806 1806 elif getattr(sys, 'oxidized', False):
1807 1807 pythonlib = pycompat.sysexecutable
1808 1808
1809 1809 fm.write(
1810 1810 b'pythonexe',
1811 1811 _(b"checking Python executable (%s)\n"),
1812 1812 pycompat.sysexecutable or _(b"unknown"),
1813 1813 )
1814 1814 fm.write(
1815 1815 b'pythonimplementation',
1816 1816 _(b"checking Python implementation (%s)\n"),
1817 1817 pycompat.sysbytes(platform.python_implementation()),
1818 1818 )
1819 1819 fm.write(
1820 1820 b'pythonver',
1821 1821 _(b"checking Python version (%s)\n"),
1822 1822 (b"%d.%d.%d" % sys.version_info[:3]),
1823 1823 )
1824 1824 fm.write(
1825 1825 b'pythonlib',
1826 1826 _(b"checking Python lib (%s)...\n"),
1827 1827 pythonlib or _(b"unknown"),
1828 1828 )
1829 1829
1830 1830 try:
1831 1831 from . import rustext # pytype: disable=import-error
1832 1832
1833 1833 rustext.__doc__ # trigger lazy import
1834 1834 except ImportError:
1835 1835 rustext = None
1836 1836
1837 1837 security = set(sslutil.supportedprotocols)
1838 1838 if sslutil.hassni:
1839 1839 security.add(b'sni')
1840 1840
1841 1841 fm.write(
1842 1842 b'pythonsecurity',
1843 1843 _(b"checking Python security support (%s)\n"),
1844 1844 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1845 1845 )
1846 1846
1847 1847 # These are warnings, not errors. So don't increment problem count. This
1848 1848 # may change in the future.
1849 1849 if b'tls1.2' not in security:
1850 1850 fm.plain(
1851 1851 _(
1852 1852 b' TLS 1.2 not supported by Python install; '
1853 1853 b'network connections lack modern security\n'
1854 1854 )
1855 1855 )
1856 1856 if b'sni' not in security:
1857 1857 fm.plain(
1858 1858 _(
1859 1859 b' SNI not supported by Python install; may have '
1860 1860 b'connectivity issues with some servers\n'
1861 1861 )
1862 1862 )
1863 1863
1864 1864 fm.plain(
1865 1865 _(
1866 1866 b"checking Rust extensions (%s)\n"
1867 1867 % (b'missing' if rustext is None else b'installed')
1868 1868 ),
1869 1869 )
1870 1870
1871 1871 # TODO print CA cert info
1872 1872
1873 1873 # hg version
1874 1874 hgver = util.version()
1875 1875 fm.write(
1876 1876 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1877 1877 )
1878 1878 fm.write(
1879 1879 b'hgverextra',
1880 1880 _(b"checking Mercurial custom build (%s)\n"),
1881 1881 b'+'.join(hgver.split(b'+')[1:]),
1882 1882 )
1883 1883
1884 1884 # compiled modules
1885 1885 hgmodules = None
1886 1886 if util.safehasattr(sys.modules[__name__], '__file__'):
1887 1887 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1888 1888 elif getattr(sys, 'oxidized', False):
1889 1889 hgmodules = pycompat.sysexecutable
1890 1890
1891 1891 fm.write(
1892 1892 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1893 1893 )
1894 1894 fm.write(
1895 1895 b'hgmodules',
1896 1896 _(b"checking installed modules (%s)...\n"),
1897 1897 hgmodules or _(b"unknown"),
1898 1898 )
1899 1899
1900 1900 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1901 1901 rustext = rustandc # for now, that's the only case
1902 1902 cext = policy.policy in (b'c', b'allow') or rustandc
1903 1903 nopure = cext or rustext
1904 1904 if nopure:
1905 1905 err = None
1906 1906 try:
1907 1907 if cext:
1908 1908 from .cext import ( # pytype: disable=import-error
1909 1909 base85,
1910 1910 bdiff,
1911 1911 mpatch,
1912 1912 osutil,
1913 1913 )
1914 1914
1915 1915 # quiet pyflakes
1916 1916 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1917 1917 if rustext:
1918 1918 from .rustext import ( # pytype: disable=import-error
1919 1919 ancestor,
1920 1920 dirstate,
1921 1921 )
1922 1922
1923 1923 dir(ancestor), dir(dirstate) # quiet pyflakes
1924 1924 except Exception as inst:
1925 1925 err = stringutil.forcebytestr(inst)
1926 1926 problems += 1
1927 1927 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1928 1928
1929 1929 compengines = util.compengines._engines.values()
1930 1930 fm.write(
1931 1931 b'compengines',
1932 1932 _(b'checking registered compression engines (%s)\n'),
1933 1933 fm.formatlist(
1934 1934 sorted(e.name() for e in compengines),
1935 1935 name=b'compengine',
1936 1936 fmt=b'%s',
1937 1937 sep=b', ',
1938 1938 ),
1939 1939 )
1940 1940 fm.write(
1941 1941 b'compenginesavail',
1942 1942 _(b'checking available compression engines (%s)\n'),
1943 1943 fm.formatlist(
1944 1944 sorted(e.name() for e in compengines if e.available()),
1945 1945 name=b'compengine',
1946 1946 fmt=b'%s',
1947 1947 sep=b', ',
1948 1948 ),
1949 1949 )
1950 1950 wirecompengines = compression.compengines.supportedwireengines(
1951 1951 compression.SERVERROLE
1952 1952 )
1953 1953 fm.write(
1954 1954 b'compenginesserver',
1955 1955 _(
1956 1956 b'checking available compression engines '
1957 1957 b'for wire protocol (%s)\n'
1958 1958 ),
1959 1959 fm.formatlist(
1960 1960 [e.name() for e in wirecompengines if e.wireprotosupport()],
1961 1961 name=b'compengine',
1962 1962 fmt=b'%s',
1963 1963 sep=b', ',
1964 1964 ),
1965 1965 )
1966 1966 re2 = b'missing'
1967 1967 if util._re2:
1968 1968 re2 = b'available'
1969 1969 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1970 1970 fm.data(re2=bool(util._re2))
1971 1971
1972 1972 # templates
1973 1973 p = templater.templatedir()
1974 1974 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1975 1975 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1976 1976 if p:
1977 1977 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1978 1978 if m:
1979 1979 # template found, check if it is working
1980 1980 err = None
1981 1981 try:
1982 1982 templater.templater.frommapfile(m)
1983 1983 except Exception as inst:
1984 1984 err = stringutil.forcebytestr(inst)
1985 1985 p = None
1986 1986 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1987 1987 else:
1988 1988 p = None
1989 1989 fm.condwrite(
1990 1990 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1991 1991 )
1992 1992 fm.condwrite(
1993 1993 not m,
1994 1994 b'defaulttemplatenotfound',
1995 1995 _(b" template '%s' not found\n"),
1996 1996 b"default",
1997 1997 )
1998 1998 if not p:
1999 1999 problems += 1
2000 2000 fm.condwrite(
2001 2001 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2002 2002 )
2003 2003
2004 2004 # editor
2005 2005 editor = ui.geteditor()
2006 2006 editor = util.expandpath(editor)
2007 2007 editorbin = procutil.shellsplit(editor)[0]
2008 2008 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2009 2009 cmdpath = procutil.findexe(editorbin)
2010 2010 fm.condwrite(
2011 2011 not cmdpath and editor == b'vi',
2012 2012 b'vinotfound',
2013 2013 _(
2014 2014 b" No commit editor set and can't find %s in PATH\n"
2015 2015 b" (specify a commit editor in your configuration"
2016 2016 b" file)\n"
2017 2017 ),
2018 2018 not cmdpath and editor == b'vi' and editorbin,
2019 2019 )
2020 2020 fm.condwrite(
2021 2021 not cmdpath and editor != b'vi',
2022 2022 b'editornotfound',
2023 2023 _(
2024 2024 b" Can't find editor '%s' in PATH\n"
2025 2025 b" (specify a commit editor in your configuration"
2026 2026 b" file)\n"
2027 2027 ),
2028 2028 not cmdpath and editorbin,
2029 2029 )
2030 2030 if not cmdpath and editor != b'vi':
2031 2031 problems += 1
2032 2032
2033 2033 # check username
2034 2034 username = None
2035 2035 err = None
2036 2036 try:
2037 2037 username = ui.username()
2038 2038 except error.Abort as e:
2039 2039 err = e.message
2040 2040 problems += 1
2041 2041
2042 2042 fm.condwrite(
2043 2043 username, b'username', _(b"checking username (%s)\n"), username
2044 2044 )
2045 2045 fm.condwrite(
2046 2046 err,
2047 2047 b'usernameerror',
2048 2048 _(
2049 2049 b"checking username...\n %s\n"
2050 2050 b" (specify a username in your configuration file)\n"
2051 2051 ),
2052 2052 err,
2053 2053 )
2054 2054
2055 2055 for name, mod in extensions.extensions():
2056 2056 handler = getattr(mod, 'debuginstall', None)
2057 2057 if handler is not None:
2058 2058 problems += handler(ui, fm)
2059 2059
2060 2060 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2061 2061 if not problems:
2062 2062 fm.data(problems=problems)
2063 2063 fm.condwrite(
2064 2064 problems,
2065 2065 b'problems',
2066 2066 _(b"%d problems detected, please check your install!\n"),
2067 2067 problems,
2068 2068 )
2069 2069 fm.end()
2070 2070
2071 2071 return problems
2072 2072
2073 2073
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Connect to the target repository and ask it directly via the wire
    # protocol's "known" command.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(node_id) for node_id in ids])
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2087 2087
2088 2088
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Delegate straight to the modern name-completion implementation.
    debugnamecomplete(ui, repo, *args)
2093 2093
2094 2094
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: simply delete the lock file(s) from the relevant
    # vfs; the current holder (if any) is not notified, hence DANGEROUS.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly and hold them
    # until the user answers the prompt (or the process is interrupted).
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # Always release whatever was successfully acquired.
        release(*locks)

    # Report mode (default): show who holds each lock and for how long.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock ourselves, so nobody else holds it.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; report it as local
                    # (user/pid) or remote (user/pid/host).
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # A missing lock file just means the lock is free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2206 2206
2207 2207
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation has a fulltext cache, hence the
        # AttributeError guard.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: drop both the in-memory and the persisted cache data.
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    # --add NODE...: populate the cache by reading the given manifests.
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read() # stores revision in cache too
        return

    # No action requested: display cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2281 2281
2282 2282
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # With --verbose, first report which on-disk merge state format
    # (v1 and/or v2 records) is present and usable.
    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # Default template renders commits, per-file state and extras in a
    # human-readable layout; --template can override it.
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the local and other heads of the merge, with
    # their labels when recorded.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per file tracked by the merge state. The
    # meaning of the state tuple's fields depends on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras": extras for files not present in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2390 2390
2391 2391
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every name from every namespace except 'branches'; branches
    # are handled separately below so that only open branches are offered.
    candidates = set()
    for namespace_name, namespace in pycompat.iteritems(repo.names):
        if namespace_name == b'branches':
            continue
        candidates.update(namespace.listnames(repo))
    for branch, _heads, _tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # Match each given prefix (an empty prefix matches everything).
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(c for c in candidates if c.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2414 2414
2415 2415
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the changelog index. Prefer the
        # index's own serializer when it provides one; otherwise fall back
        # to the generic persistent-data builder.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Emit the raw bytes already persisted on disk (if any).
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the on-disk data against the live index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Display the docket (metadata header) of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2477 2477
2478 2478
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id without resolving it against the
        # local repository, so markers may reference unknown changesets.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index from the obsstore.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors inside a
        # transaction, under the store lock.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Recording parents requires the precursor to be known
                    # locally (parents are looked up in the repository).
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # With both --rev and --index, walk all markers so indices are
            # global, but only display the relevant subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2628 2628
2629 2629
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Emit one "source -> destination" line per copy recorded against the
    # first parent of the selected revision.
    p1_copies = ctx.p1copies()
    for destination, source in p1_copies.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2642 2642
2643 2643
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Fix: this function was previously (mis)named debugp1copies — an
    # obvious copy-paste slip that rebound the module-level name
    # 'debugp1copies' to this p2 variant, shadowing the real p1 function.
    # The registered command name (b'debugp2copies' above) is unaffected.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Emit one "source -> destination" line per copy recorded against the
    # second parent of the selected revision.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2656 2656
2657 2657
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): dirstate entries starting with 'path' whose
        # state letter is in 'acceptable'. Unless --full, a match is cut at
        # the next path separator and reported as a directory instead.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Reject specs that escape the repository root.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate stores '/'-separated paths; convert the spec (and later
        # convert matches back) on platforms with a different separator.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the options;
    # no option at all means "any" (handled below via 'nmar' fallback).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2726 2726
2727 2727
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then report every copy between them as
    # "source -> destination", restricted to the matched files.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for destination, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2741 2741
2742 2742
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here, but its output only
    # shows up when --debug is in effect.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if can_push else _(b'no'))
        )
    finally:
        # Always close the connection, even if a query above failed.
        peer.close()
2766 2766
2767 2767
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied the same way a real merge would: via a temporary
    # ui.forcemerge config override.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence the tool-selection machinery unless --debug is set,
            # so the normal output stays one line per file.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2852 2852
2853 2853
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: print every key/value pair in the namespace
            for key, value in sorted(
                pycompat.iteritems(peer.listkeys(namespace))
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return
        # update mode: compare-and-swap old -> new for the given key
        key, old, new = keyinfo
        args = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with peer.commandexecutor() as executor:
            result = executor.callcommand(b'pushkey', args).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        # pushkey returns a truthy value on success; exit code is inverted
        return not result
    finally:
        peer.close()
2889 2889
2890 2890
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector ("pvec") comparison data for two revisions

    Prints the pvec of revisions A and B, their depths, and the
    delta/hamming/distance metrics, together with the relation between
    the two vectors: ``=`` (equal), ``>``/``<`` (ancestor ordering) or
    ``|`` (divergent).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: without it, a vector pair matching none of
        # the comparisons above would leave `rel` unbound and crash the
        # ui.write() below with an UnboundLocalError.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2917 2917
2918 2918
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this down to
        # the set of files that are inconsistent with the parent manifest
        # (see the command docstring above).
        changed = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # files only present in the manifest always need a rebuild
            manifestonly = inmanifest - indirstate
            # dirstate-only files are left alone when marked as added
            notadded = set()
            for f in indirstate - inmanifest:
                if not dirstate.get_entry(f).added:
                    notadded.add(f)
            changed = manifestonly | notadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changed)
2966 2966
2967 2967
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # delegate the actual work to the repair module
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2984 2984
2985 2985
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
            continue
        source, srcnode = renamed
        ui.write(
            _(b"%s renamed from %s:%s\n") % (relpath, source, hex(srcnode))
        )
3005 3005
3006 3006
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted for stable output
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3012 3012
3013 3013
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump: raw per-revision index table, no aggregate statistics
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        # running total of raw sizes, feeds the "compression" column
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # no delta parent: the revision is its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision's parents stop being heads once it is appended
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # statistics mode: decode the revlog header version and flags first
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    # each size accumulator is a [min, max, total] triple; the total slot
    # is converted into an average after the main loop
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    # fold `size` into a [min, max, total] accumulator
    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # main pass: classify every revision and accumulate sizes
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (or an empty text)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta: extend the chain bookkeeping
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot (delta against another snapshot)
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: record what it is a delta against
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # the first byte of a chunk encodes its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # derive aggregate counts and turn total slots into averages
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string templates sized to the widest value they must hold
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    # pair a value with its percentage of `total` (100% when total is 0)
    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    # emit the report
    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    # render a per-chunk-type label for the compression breakdown
    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    # size breakdowns are only meaningful for revlog format > 0
    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            # parent deltas only exist with the generaldelta format
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3368 3368
3369 3369
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # only the two historical index formats are supported
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes, otherwise abbreviated ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    # (compute the id column width from the first revision, if any)
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # print the column headers; layout depends on format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # one row per revision, matching the header layout chosen above
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3483 3483
3484 3484
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # ordered pipeline of (stage name, tree transformer); each stage is
    # applied to the output of the previous one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # decide which stages' trees get printed: 'showalways' unconditionally,
    # 'showchanged' only when the stage altered the tree
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping every intermediate tree for --verify-optimized
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists; exit 1 on any mismatch
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style listing of the differing revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the final tree and print the resulting revs
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3616 3616
3617 3617
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    iolog = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Line buffering would be ideal, but binary-mode line buffering is
        # unsupported and warns on Python 3.8+. Unbuffered writes are fine
        # here since this is not performance-critical code.
        try:
            iolog = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # a pipe cannot seek, so append mode fails on py3; fall back
            # to plain write mode
            iolog = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        iolog = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=iolog)
    server.serve_forever()
3666 3666
3667 3667
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # resolve both revisions to nodes; rev2 defaults to the null revision
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3695 3695
3696 3696
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    wantsother = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wantsother:
        # with -c/-m/--dir the first positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap filelog-style wrappers down to the backing revlog
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3723 3723
3724 3724
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    # the chain-building machinery below relies on Windows APIs only
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # fall back to the repo's configured default path
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # derive (host, port) from the URL; only https and ssh make sense here
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    # imported lazily since the module only exists on Windows
    from . import win32

    # Certificate verification is deliberately disabled (CERT_NONE): we
    # only need the raw peer certificate bytes to feed into the Windows
    # chain builder, not a validated connection.
    # NOTE(review): ssl.wrap_socket is deprecated since Python 3.7 and
    # removed in 3.12 — consider SSLContext.wrap_socket; confirm against
    # the Python versions this code must support.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # fetch the peer certificate in DER (binary) form
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first pass: check only, without triggering Windows Update
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # second pass: allow Windows to fetch missing intermediates
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3796 3796
3797 3797
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle under .hg/strip-backup, most recent first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # These options are consumed by the incoming/bundlerepo machinery used
    # below; --bundle and --force are fixed here rather than user-supplied.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to --limit changesets of chlist, honoring --newest-first
        # and skipping merges when --no-merges was given.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we do not have; warn
            # and move on to the next backup instead of aborting.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-comparison chatter; only our own output below
        # should be shown.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    # Apply the first bundle that contains the wanted node,
                    # then stop scanning the remaining backups.
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: show the bundle's mtime as a header, then
                # either the path (--verbose) or a one-line-per-changeset
                # summary rendered through the log displayer.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always run the cleanup callback returned by getremotechanges().
            cleanupfn()
3938 3938
3939 3939
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the (possibly omitted) revision to one changectx and dump each
    # subrepository entry as a path/source/revision triple.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3951 3951
3952 3952
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the ui and (possibly None) repo.
    code.interact(local={'ui': ui, 'repo': repo})
3968 3968
3969 3969
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared cache across successorssets() calls, as recommended by its API.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One line per successors set; empty sets (pruned) produce a
            # bare newline.
            if succsset:
                ui.write(b' %s' % b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
4024 4024
4025 4025
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            # No cached entry for this revision.
            shown = b'missing'
        elif not fnode:
            # Cached but falsy entry: treated as invalid cache content.
            shown = b'invalid'
        else:
            shown = hex(fnode)
            if not flog.hasnode(fnode):
                # The cache points at a node the .hgtags filelog lacks.
                shown += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (r, hex(node), shown))
4044 4044
4045 4045
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Turn the repeatable -D KEY=VALUE options into template properties.
    # Empty keys and the name 'ui' are rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the parsed template tree, and the alias-expanded tree too
        # when [templatealias] expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once using the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4109 4109
4110 4110
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None; substitute a printable placeholder so the
    # output line below always makes sense.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4125 4125
4126 4126
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returned, for testing the prompt machinery.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4139 4139
4140 4140
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while rewriting
    # cache files.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4146 4146
4147 4147
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # The repeatable --optimize values are deduplicated into a set before
    # being handed to the upgrade machinery.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4197 4197
4198 4198
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    files = list(repo[None].walk(matcher))
    if not files:
        return
    # Normalize separators when ui.slash is set and the OS separator
    # differs from '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        topath = util.normpath
    else:
        topath = lambda fn: fn
    # Column widths are sized to the longest repo path and display path.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(name) for name in files),
        max(len(repo.pathto(name)) for name in files),
    )
    for name in files:
        flag = b'exact' if matcher.exact(name) else b''
        line = fmt % (name, topath(repo.pathto(name)), flag)
        ui.write(b"%s\n" % line.rstrip())
4225 4225
4226 4226
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # List each divergent changeset as "<hex> (<phase>)", followed
            # by a trailing space to separate it from the reason text.
            parts = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4244 4244
4245 4245
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the command-specific,
        # non-empty flags are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4276 4276
4277 4277
4278 4278 def _parsewirelangblocks(fh):
4279 4279 activeaction = None
4280 4280 blocklines = []
4281 4281 lastindent = 0
4282 4282
4283 4283 for line in fh:
4284 4284 line = line.rstrip()
4285 4285 if not line:
4286 4286 continue
4287 4287
4288 4288 if line.startswith(b'#'):
4289 4289 continue
4290 4290
4291 4291 if not line.startswith(b' '):
4292 4292 # New block. Flush previous one.
4293 4293 if activeaction:
4294 4294 yield activeaction, blocklines
4295 4295
4296 4296 activeaction = line
4297 4297 blocklines = []
4298 4298 lastindent = 0
4299 4299 continue
4300 4300
4301 4301 # Else we start with an indent.
4302 4302
4303 4303 if not activeaction:
4304 4304 raise error.Abort(_(b'indented line outside of block'))
4305 4305
4306 4306 indent = len(line) - len(line.lstrip())
4307 4307
4308 4308 # If this line is indented more than the last line, concatenate it.
4309 4309 if indent > lastindent and blocklines:
4310 4310 blocklines[-1] += line.lstrip()
4311 4311 else:
4312 4312 blocklines.append(line)
4313 4313 lastindent = indent
4314 4314
4315 4315 # Flush last block.
4316 4316 if activeaction:
4317 4317 yield activeaction, blocklines
4318 4318
4319 4319
4320 4320 @command(
4321 4321 b'debugwireproto',
4322 4322 [
4323 4323 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4324 4324 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4325 4325 (
4326 4326 b'',
4327 4327 b'noreadstderr',
4328 4328 False,
4329 4329 _(b'do not read from stderr of the remote'),
4330 4330 ),
4331 4331 (
4332 4332 b'',
4333 4333 b'nologhandshake',
4334 4334 False,
4335 4335 _(b'do not log I/O related to the peer handshake'),
4336 4336 ),
4337 4337 ]
4338 4338 + cmdutil.remoteopts,
4339 4339 _(b'[PATH]'),
4340 4340 optionalrepo=True,
4341 4341 )
4342 4342 def debugwireproto(ui, repo, path=None, **opts):
4343 4343 """send wire protocol commands to a server
4344 4344
4345 4345 This command can be used to issue wire protocol commands to remote
4346 4346 peers and to debug the raw data being exchanged.
4347 4347
4348 4348 ``--localssh`` will start an SSH server against the current repository
4349 4349 and connect to that. By default, the connection will perform a handshake
4350 4350 and establish an appropriate peer instance.
4351 4351
4352 4352 ``--peer`` can be used to bypass the handshake protocol and construct a
4353 4353 peer instance using the specified class type. Valid values are ``raw``,
4354 4354 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4355 4355 don't support higher-level command actions.
4356 4356
4357 4357 ``--noreadstderr`` can be used to disable automatic reading from stderr
4358 4358 of the peer (for SSH connections only). Disabling automatic reading of
4359 4359 stderr is useful for making output more deterministic.
4360 4360
4361 4361 Commands are issued via a mini language which is specified via stdin.
4362 4362 The language consists of individual actions to perform. An action is
4363 4363 defined by a block. A block is defined as a line with no leading
4364 4364 space followed by 0 or more lines with leading space. Blocks are
4365 4365 effectively a high-level command with additional metadata.
4366 4366
4367 4367 Lines beginning with ``#`` are ignored.
4368 4368
4369 4369 The following sections denote available actions.
4370 4370
4371 4371 raw
4372 4372 ---
4373 4373
4374 4374 Send raw data to the server.
4375 4375
4376 4376 The block payload contains the raw data to send as one atomic send
4377 4377 operation. The data may not actually be delivered in a single system
4378 4378 call: it depends on the abilities of the transport being used.
4379 4379
4380 4380 Each line in the block is de-indented and concatenated. Then, that
4381 4381 value is evaluated as a Python b'' literal. This allows the use of
4382 4382 backslash escaping, etc.
4383 4383
4384 4384 raw+
4385 4385 ----
4386 4386
4387 4387 Behaves like ``raw`` except flushes output afterwards.
4388 4388
4389 4389 command <X>
4390 4390 -----------
4391 4391
4392 4392 Send a request to run a named command, whose name follows the ``command``
4393 4393 string.
4394 4394
4395 4395 Arguments to the command are defined as lines in this block. The format of
4396 4396 each line is ``<key> <value>``. e.g.::
4397 4397
4398 4398 command listkeys
4399 4399 namespace bookmarks
4400 4400
4401 4401 If the value begins with ``eval:``, it will be interpreted as a Python
4402 4402 literal expression. Otherwise values are interpreted as Python b'' literals.
4403 4403 This allows sending complex types and encoding special byte sequences via
4404 4404 backslash escaping.
4405 4405
4406 4406 The following arguments have special meaning:
4407 4407
4408 4408 ``PUSHFILE``
4409 4409 When defined, the *push* mechanism of the peer will be used instead
4410 4410 of the static request-response mechanism and the content of the
4411 4411 file specified in the value of this argument will be sent as the
4412 4412 command payload.
4413 4413
4414 4414 This can be used to submit a local bundle file to the remote.
4415 4415
4416 4416 batchbegin
4417 4417 ----------
4418 4418
4419 4419 Instruct the peer to begin a batched send.
4420 4420
4421 4421 All ``command`` blocks are queued for execution until the next
4422 4422 ``batchsubmit`` block.
4423 4423
4424 4424 batchsubmit
4425 4425 -----------
4426 4426
4427 4427 Submit previously queued ``command`` blocks as a batch request.
4428 4428
4429 4429 This action MUST be paired with a ``batchbegin`` action.
4430 4430
4431 4431 httprequest <method> <path>
4432 4432 ---------------------------
4433 4433
4434 4434 (HTTP peer only)
4435 4435
4436 4436 Send an HTTP request to the peer.
4437 4437
4438 4438 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4439 4439
4440 4440 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4441 4441 headers to add to the request. e.g. ``Accept: foo``.
4442 4442
4443 4443 The following arguments are special:
4444 4444
4445 4445 ``BODYFILE``
4446 4446 The content of the file defined as the value to this argument will be
4447 4447 transferred verbatim as the HTTP request body.
4448 4448
4449 4449 ``frame <type> <flags> <payload>``
4450 4450 Send a unified protocol frame as part of the request body.
4451 4451
4452 4452 All frames will be collected and sent as the body to the HTTP
4453 4453 request.
4454 4454
4455 4455 close
4456 4456 -----
4457 4457
4458 4458 Close the connection to the server.
4459 4459
4460 4460 flush
4461 4461 -----
4462 4462
4463 4463 Flush data written to the server.
4464 4464
4465 4465 readavailable
4466 4466 -------------
4467 4467
4468 4468 Close the write end of the connection and read all available data from
4469 4469 the server.
4470 4470
4471 4471 If the connection to the server encompasses multiple pipes, we poll both
4472 4472 pipes and read available data.
4473 4473
4474 4474 readline
4475 4475 --------
4476 4476
4477 4477 Read a line of output from the server. If there are multiple output
4478 4478 pipes, reads only the main pipe.
4479 4479
4480 4480 ereadline
4481 4481 ---------
4482 4482
4483 4483 Like ``readline``, but read from the stderr pipe, if available.
4484 4484
4485 4485 read <X>
4486 4486 --------
4487 4487
4488 4488 ``read()`` N bytes from the server's main output pipe.
4489 4489
4490 4490 eread <X>
4491 4491 ---------
4492 4492
4493 4493 ``read()`` N bytes from the server's stderr pipe, if available.
4494 4494
4495 4495 Specifying Unified Frame-Based Protocol Frames
4496 4496 ----------------------------------------------
4497 4497
4498 4498 It is possible to emit a *Unified Frame-Based Protocol* by using special
4499 4499 syntax.
4500 4500
4501 4501 A frame is composed as a type, flags, and payload. These can be parsed
4502 4502 from a string of the form:
4503 4503
4504 4504 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4505 4505
4506 4506 ``request-id`` and ``stream-id`` are integers defining the request and
4507 4507 stream identifiers.
4508 4508
4509 4509 ``type`` can be an integer value for the frame type or the string name
4510 4510 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4511 4511 ``command-name``.
4512 4512
4513 4513 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4514 4514 components. Each component (and there can be just one) can be an integer
4515 4515 or a flag name for stream flags or frame flags, respectively. Values are
4516 4516 resolved to integers and then bitwise OR'd together.
4517 4517
4518 4518 ``payload`` represents the raw frame payload. If it begins with
4519 4519 ``cbor:``, the following string is evaluated as Python code and the
4520 4520 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4521 4521 as a Python byte string literal.
4522 4522 """
4523 4523 opts = pycompat.byteskwargs(opts)
4524 4524
4525 4525 if opts[b'localssh'] and not repo:
4526 4526 raise error.Abort(_(b'--localssh requires a repository'))
4527 4527
4528 4528 if opts[b'peer'] and opts[b'peer'] not in (
4529 4529 b'raw',
4530 4530 b'ssh1',
4531 4531 ):
4532 4532 raise error.Abort(
4533 4533 _(b'invalid value for --peer'),
4534 4534 hint=_(b'valid values are "raw" and "ssh1"'),
4535 4535 )
4536 4536
4537 4537 if path and opts[b'localssh']:
4538 4538 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4539 4539
4540 4540 if ui.interactive():
4541 4541 ui.write(_(b'(waiting for commands on stdin)\n'))
4542 4542
4543 4543 blocks = list(_parsewirelangblocks(ui.fin))
4544 4544
4545 4545 proc = None
4546 4546 stdin = None
4547 4547 stdout = None
4548 4548 stderr = None
4549 4549 opener = None
4550 4550
4551 4551 if opts[b'localssh']:
4552 4552 # We start the SSH server in its own process so there is process
4553 4553 # separation. This prevents a whole class of potential bugs around
4554 4554 # shared state from interfering with server operation.
4555 4555 args = procutil.hgcmd() + [
4556 4556 b'-R',
4557 4557 repo.root,
4558 4558 b'debugserve',
4559 4559 b'--sshstdio',
4560 4560 ]
4561 4561 proc = subprocess.Popen(
4562 4562 pycompat.rapply(procutil.tonativestr, args),
4563 4563 stdin=subprocess.PIPE,
4564 4564 stdout=subprocess.PIPE,
4565 4565 stderr=subprocess.PIPE,
4566 4566 bufsize=0,
4567 4567 )
4568 4568
4569 4569 stdin = proc.stdin
4570 4570 stdout = proc.stdout
4571 4571 stderr = proc.stderr
4572 4572
4573 4573 # We turn the pipes into observers so we can log I/O.
4574 4574 if ui.verbose or opts[b'peer'] == b'raw':
4575 4575 stdin = util.makeloggingfileobject(
4576 4576 ui, proc.stdin, b'i', logdata=True
4577 4577 )
4578 4578 stdout = util.makeloggingfileobject(
4579 4579 ui, proc.stdout, b'o', logdata=True
4580 4580 )
4581 4581 stderr = util.makeloggingfileobject(
4582 4582 ui, proc.stderr, b'e', logdata=True
4583 4583 )
4584 4584
4585 4585 # --localssh also implies the peer connection settings.
4586 4586
4587 4587 url = b'ssh://localserver'
4588 4588 autoreadstderr = not opts[b'noreadstderr']
4589 4589
4590 4590 if opts[b'peer'] == b'ssh1':
4591 4591 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4592 4592 peer = sshpeer.sshv1peer(
4593 4593 ui,
4594 4594 url,
4595 4595 proc,
4596 4596 stdin,
4597 4597 stdout,
4598 4598 stderr,
4599 4599 None,
4600 4600 autoreadstderr=autoreadstderr,
4601 4601 )
4602 4602 elif opts[b'peer'] == b'raw':
4603 4603 ui.write(_(b'using raw connection to peer\n'))
4604 4604 peer = None
4605 4605 else:
4606 4606 ui.write(_(b'creating ssh peer from handshake results\n'))
4607 4607 peer = sshpeer.makepeer(
4608 4608 ui,
4609 4609 url,
4610 4610 proc,
4611 4611 stdin,
4612 4612 stdout,
4613 4613 stderr,
4614 4614 autoreadstderr=autoreadstderr,
4615 4615 )
4616 4616
4617 4617 elif path:
4618 4618 # We bypass hg.peer() so we can proxy the sockets.
4619 4619 # TODO consider not doing this because we skip
4620 4620 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4621 4621 u = urlutil.url(path)
4622 4622 if u.scheme != b'http':
4623 4623 raise error.Abort(_(b'only http:// paths are currently supported'))
4624 4624
4625 4625 url, authinfo = u.authinfo()
4626 4626 openerargs = {
4627 4627 'useragent': b'Mercurial debugwireproto',
4628 4628 }
4629 4629
4630 4630 # Turn pipes/sockets into observers so we can log I/O.
4631 4631 if ui.verbose:
4632 4632 openerargs.update(
4633 4633 {
4634 4634 'loggingfh': ui,
4635 4635 'loggingname': b's',
4636 4636 'loggingopts': {
4637 4637 'logdata': True,
4638 4638 'logdataapis': False,
4639 4639 },
4640 4640 }
4641 4641 )
4642 4642
4643 4643 if ui.debugflag:
4644 4644 openerargs['loggingopts']['logdataapis'] = True
4645 4645
4646 4646 # Don't send default headers when in raw mode. This allows us to
4647 4647 # bypass most of the behavior of our URL handling code so we can
4648 4648 # have near complete control over what's sent on the wire.
4649 4649 if opts[b'peer'] == b'raw':
4650 4650 openerargs['sendaccept'] = False
4651 4651
4652 4652 opener = urlmod.opener(ui, authinfo, **openerargs)
4653 4653
4654 4654 if opts[b'peer'] == b'raw':
4655 4655 ui.write(_(b'using raw connection to peer\n'))
4656 4656 peer = None
4657 4657 elif opts[b'peer']:
4658 4658 raise error.Abort(
4659 4659 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4660 4660 )
4661 4661 else:
4662 4662 peer = httppeer.makepeer(ui, path, opener=opener)
4663 4663
4664 4664 # We /could/ populate stdin/stdout with sock.makefile()...
4665 4665 else:
4666 4666 raise error.Abort(_(b'unsupported connection configuration'))
4667 4667
4668 4668 batchedcommands = None
4669 4669
4670 4670 # Now perform actions based on the parsed wire language instructions.
4671 4671 for action, lines in blocks:
4672 4672 if action in (b'raw', b'raw+'):
4673 4673 if not stdin:
4674 4674 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4675 4675
4676 4676 # Concatenate the data together.
4677 4677 data = b''.join(l.lstrip() for l in lines)
4678 4678 data = stringutil.unescapestr(data)
4679 4679 stdin.write(data)
4680 4680
4681 4681 if action == b'raw+':
4682 4682 stdin.flush()
4683 4683 elif action == b'flush':
4684 4684 if not stdin:
4685 4685 raise error.Abort(_(b'cannot call flush on this peer'))
4686 4686 stdin.flush()
4687 4687 elif action.startswith(b'command'):
4688 4688 if not peer:
4689 4689 raise error.Abort(
4690 4690 _(
4691 4691 b'cannot send commands unless peer instance '
4692 4692 b'is available'
4693 4693 )
4694 4694 )
4695 4695
4696 4696 command = action.split(b' ', 1)[1]
4697 4697
4698 4698 args = {}
4699 4699 for line in lines:
4700 4700 # We need to allow empty values.
4701 4701 fields = line.lstrip().split(b' ', 1)
4702 4702 if len(fields) == 1:
4703 4703 key = fields[0]
4704 4704 value = b''
4705 4705 else:
4706 4706 key, value = fields
4707 4707
4708 4708 if value.startswith(b'eval:'):
4709 4709 value = stringutil.evalpythonliteral(value[5:])
4710 4710 else:
4711 4711 value = stringutil.unescapestr(value)
4712 4712
4713 4713 args[key] = value
4714 4714
4715 4715 if batchedcommands is not None:
4716 4716 batchedcommands.append((command, args))
4717 4717 continue
4718 4718
4719 4719 ui.status(_(b'sending %s command\n') % command)
4720 4720
4721 4721 if b'PUSHFILE' in args:
4722 4722 with open(args[b'PUSHFILE'], 'rb') as fh:
4723 4723 del args[b'PUSHFILE']
4724 4724 res, output = peer._callpush(
4725 4725 command, fh, **pycompat.strkwargs(args)
4726 4726 )
4727 4727 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4728 4728 ui.status(
4729 4729 _(b'remote output: %s\n') % stringutil.escapestr(output)
4730 4730 )
4731 4731 else:
4732 4732 with peer.commandexecutor() as e:
4733 4733 res = e.callcommand(command, args).result()
4734 4734
4735 4735 ui.status(
4736 4736 _(b'response: %s\n')
4737 4737 % stringutil.pprint(res, bprefix=True, indent=2)
4738 4738 )
4739 4739
4740 4740 elif action == b'batchbegin':
4741 4741 if batchedcommands is not None:
4742 4742 raise error.Abort(_(b'nested batchbegin not allowed'))
4743 4743
4744 4744 batchedcommands = []
4745 4745 elif action == b'batchsubmit':
4746 4746 # There is a batching API we could go through. But it would be
4747 4747 # difficult to normalize requests into function calls. It is easier
4748 4748 # to bypass this layer and normalize to commands + args.
4749 4749 ui.status(
4750 4750 _(b'sending batch with %d sub-commands\n')
4751 4751 % len(batchedcommands)
4752 4752 )
4753 4753 assert peer is not None
4754 4754 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4755 4755 ui.status(
4756 4756 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4757 4757 )
4758 4758
4759 4759 batchedcommands = None
4760 4760
4761 4761 elif action.startswith(b'httprequest '):
4762 4762 if not opener:
4763 4763 raise error.Abort(
4764 4764 _(b'cannot use httprequest without an HTTP peer')
4765 4765 )
4766 4766
4767 4767 request = action.split(b' ', 2)
4768 4768 if len(request) != 3:
4769 4769 raise error.Abort(
4770 4770 _(
4771 4771 b'invalid httprequest: expected format is '
4772 4772 b'"httprequest <method> <path>'
4773 4773 )
4774 4774 )
4775 4775
4776 4776 method, httppath = request[1:]
4777 4777 headers = {}
4778 4778 body = None
4779 4779 frames = []
4780 4780 for line in lines:
4781 4781 line = line.lstrip()
4782 4782 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4783 4783 if m:
4784 4784 # Headers need to use native strings.
4785 4785 key = pycompat.strurl(m.group(1))
4786 4786 value = pycompat.strurl(m.group(2))
4787 4787 headers[key] = value
4788 4788 continue
4789 4789
4790 4790 if line.startswith(b'BODYFILE '):
4791 4791 with open(line.split(b' ', 1), b'rb') as fh:
4792 4792 body = fh.read()
4793 4793 elif line.startswith(b'frame '):
4794 4794 frame = wireprotoframing.makeframefromhumanstring(
4795 4795 line[len(b'frame ') :]
4796 4796 )
4797 4797
4798 4798 frames.append(frame)
4799 4799 else:
4800 4800 raise error.Abort(
4801 4801 _(b'unknown argument to httprequest: %s') % line
4802 4802 )
4803 4803
4804 4804 url = path + httppath
4805 4805
4806 4806 if frames:
4807 4807 body = b''.join(bytes(f) for f in frames)
4808 4808
4809 4809 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4810 4810
4811 4811 # urllib.Request insists on using has_data() as a proxy for
4812 4812 # determining the request method. Override that to use our
4813 4813 # explicitly requested method.
4814 4814 req.get_method = lambda: pycompat.sysstr(method)
4815 4815
4816 4816 try:
4817 4817 res = opener.open(req)
4818 4818 body = res.read()
4819 4819 except util.urlerr.urlerror as e:
4820 4820 # read() method must be called, but only exists in Python 2
4821 4821 getattr(e, 'read', lambda: None)()
4822 4822 continue
4823 4823
4824 4824 ct = res.headers.get('Content-Type')
4825 4825 if ct == 'application/mercurial-cbor':
4826 4826 ui.write(
4827 4827 _(b'cbor> %s\n')
4828 4828 % stringutil.pprint(
4829 4829 cborutil.decodeall(body), bprefix=True, indent=2
4830 4830 )
4831 4831 )
4832 4832
4833 4833 elif action == b'close':
4834 4834 assert peer is not None
4835 4835 peer.close()
4836 4836 elif action == b'readavailable':
4837 4837 if not stdout or not stderr:
4838 4838 raise error.Abort(
4839 4839 _(b'readavailable not available on this peer')
4840 4840 )
4841 4841
4842 4842 stdin.close()
4843 4843 stdout.read()
4844 4844 stderr.read()
4845 4845
4846 4846 elif action == b'readline':
4847 4847 if not stdout:
4848 4848 raise error.Abort(_(b'readline not available on this peer'))
4849 4849 stdout.readline()
4850 4850 elif action == b'ereadline':
4851 4851 if not stderr:
4852 4852 raise error.Abort(_(b'ereadline not available on this peer'))
4853 4853 stderr.readline()
4854 4854 elif action.startswith(b'read '):
4855 4855 count = int(action.split(b' ', 1)[1])
4856 4856 if not stdout:
4857 4857 raise error.Abort(_(b'read not available on this peer'))
4858 4858 stdout.read(count)
4859 4859 elif action.startswith(b'eread '):
4860 4860 count = int(action.split(b' ', 1)[1])
4861 4861 if not stderr:
4862 4862 raise error.Abort(_(b'eread not available on this peer'))
4863 4863 stderr.read(count)
4864 4864 else:
4865 4865 raise error.Abort(_(b'unknown action: %s') % action)
4866 4866
4867 4867 if batchedcommands is not None:
4868 4868 raise error.Abort(_(b'unclosed "batchbegin" request'))
4869 4869
4870 4870 if peer:
4871 4871 peer.close()
4872 4872
4873 4873 if proc:
4874 4874 proc.kill()
@@ -1,524 +1,525 b''
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15 15
16 16 # mbp: "you know that thing where cvs gives you conflict markers?"
17 17 # s: "i hate that."
18 18
19 19 from __future__ import absolute_import
20 20
21 21 from .i18n import _
22 22 from . import (
23 23 error,
24 24 mdiff,
25 25 pycompat,
26 26 )
27 27 from .utils import stringutil
28 28
29 29
30 30 class CantReprocessAndShowBase(Exception):
31 31 pass
32 32
33 33
34 34 def intersect(ra, rb):
35 35 """Given two ranges return the range where they intersect or None.
36 36
37 37 >>> intersect((0, 10), (0, 6))
38 38 (0, 6)
39 39 >>> intersect((0, 10), (5, 15))
40 40 (5, 10)
41 41 >>> intersect((0, 10), (10, 15))
42 42 >>> intersect((0, 9), (10, 15))
43 43 >>> intersect((0, 9), (7, 15))
44 44 (7, 9)
45 45 """
46 46 assert ra[0] <= ra[1]
47 47 assert rb[0] <= rb[1]
48 48
49 49 sa = max(ra[0], rb[0])
50 50 sb = min(ra[1], rb[1])
51 51 if sa < sb:
52 52 return sa, sb
53 53 else:
54 54 return None
55 55
56 56
57 57 def compare_range(a, astart, aend, b, bstart, bend):
58 58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
59 59 if (aend - astart) != (bend - bstart):
60 60 return False
61 61 for ia, ib in zip(
62 62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
63 63 ):
64 64 if a[ia] != b[ib]:
65 65 return False
66 66 else:
67 67 return True
68 68
69 69
70 70 class Merge3Text(object):
71 71 """3-way merge of texts.
72 72
73 73 Given strings BASE, OTHER, THIS, tries to produce a combined text
74 74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
75 75
76 76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
77 77 self.basetext = basetext
78 78 self.atext = atext
79 79 self.btext = btext
80 80 if base is None:
81 81 base = mdiff.splitnewlines(basetext)
82 82 if a is None:
83 83 a = mdiff.splitnewlines(atext)
84 84 if b is None:
85 85 b = mdiff.splitnewlines(btext)
86 86 self.base = base
87 87 self.a = a
88 88 self.b = b
89 89
90 def merge_lines(
91 self,
92 name_a=None,
93 name_b=None,
94 name_base=None,
95 start_marker=b'<<<<<<<',
96 mid_marker=b'=======',
97 end_marker=b'>>>>>>>',
98 base_marker=None,
99 minimize=False,
100 ):
101 """Return merge in cvs-like form."""
102 conflicts = False
103 newline = b'\n'
104 if len(self.a) > 0:
105 if self.a[0].endswith(b'\r\n'):
106 newline = b'\r\n'
107 elif self.a[0].endswith(b'\r'):
108 newline = b'\r'
109 if name_a and start_marker:
110 start_marker = start_marker + b' ' + name_a
111 if name_b and end_marker:
112 end_marker = end_marker + b' ' + name_b
113 if name_base and base_marker:
114 base_marker = base_marker + b' ' + name_base
115 merge_groups = self.merge_groups()
116 if minimize:
117 merge_groups = self.minimize(merge_groups)
118 lines = []
119 for what, group_lines in merge_groups:
120 if what == b'conflict':
121 base_lines, a_lines, b_lines = group_lines
122 conflicts = True
123 if start_marker is not None:
124 lines.append(start_marker + newline)
125 lines.extend(a_lines)
126 if base_marker is not None:
127 lines.append(base_marker + newline)
128 lines.extend(base_lines)
129 if mid_marker is not None:
130 lines.append(mid_marker + newline)
131 lines.extend(b_lines)
132 if end_marker is not None:
133 lines.append(end_marker + newline)
134 else:
135 lines.extend(group_lines)
136 return lines, conflicts
137
138 90 def merge_groups(self):
139 91 """Yield sequence of line groups. Each one is a tuple:
140 92
141 93 'unchanged', lines
142 94 Lines unchanged from base
143 95
144 96 'a', lines
145 97 Lines taken from a
146 98
147 99 'same', lines
148 100 Lines taken from a (and equal to b)
149 101
150 102 'b', lines
151 103 Lines taken from b
152 104
153 105 'conflict', (base_lines, a_lines, b_lines)
154 106 Lines from base were changed to either a or b and conflict.
155 107 """
156 108 for t in self.merge_regions():
157 109 what = t[0]
158 110 if what == b'unchanged':
159 111 yield what, self.base[t[1] : t[2]]
160 112 elif what == b'a' or what == b'same':
161 113 yield what, self.a[t[1] : t[2]]
162 114 elif what == b'b':
163 115 yield what, self.b[t[1] : t[2]]
164 116 elif what == b'conflict':
165 117 yield (
166 118 what,
167 119 (
168 120 self.base[t[1] : t[2]],
169 121 self.a[t[3] : t[4]],
170 122 self.b[t[5] : t[6]],
171 123 ),
172 124 )
173 125 else:
174 126 raise ValueError(what)
175 127
176 128 def merge_regions(self):
177 129 """Return sequences of matching and conflicting regions.
178 130
179 131 This returns tuples, where the first value says what kind we
180 132 have:
181 133
182 134 'unchanged', start, end
183 135 Take a region of base[start:end]
184 136
185 137 'same', astart, aend
186 138 b and a are different from base but give the same result
187 139
188 140 'a', start, end
189 141 Non-clashing insertion from a[start:end]
190 142
191 143 'conflict', zstart, zend, astart, aend, bstart, bend
192 144 Conflict between a and b, with z as common ancestor
193 145
194 146 Method is as follows:
195 147
196 148 The two sequences align only on regions which match the base
197 149 and both descendants. These are found by doing a two-way diff
198 150 of each one against the base, and then finding the
199 151 intersections between those regions. These "sync regions"
200 152 are by definition unchanged in both and easily dealt with.
201 153
202 154 The regions in between can be in any of three cases:
203 155 conflicted, or changed on only one side.
204 156 """
205 157
206 158 # section a[0:ia] has been disposed of, etc
207 159 iz = ia = ib = 0
208 160
209 161 for region in self.find_sync_regions():
210 162 zmatch, zend, amatch, aend, bmatch, bend = region
211 163 # print 'match base [%d:%d]' % (zmatch, zend)
212 164
213 165 matchlen = zend - zmatch
214 166 assert matchlen >= 0
215 167 assert matchlen == (aend - amatch)
216 168 assert matchlen == (bend - bmatch)
217 169
218 170 len_a = amatch - ia
219 171 len_b = bmatch - ib
220 172 len_base = zmatch - iz
221 173 assert len_a >= 0
222 174 assert len_b >= 0
223 175 assert len_base >= 0
224 176
225 177 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
226 178
227 179 if len_a or len_b:
228 180 # try to avoid actually slicing the lists
229 181 equal_a = compare_range(
230 182 self.a, ia, amatch, self.base, iz, zmatch
231 183 )
232 184 equal_b = compare_range(
233 185 self.b, ib, bmatch, self.base, iz, zmatch
234 186 )
235 187 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
236 188
237 189 if same:
238 190 yield b'same', ia, amatch
239 191 elif equal_a and not equal_b:
240 192 yield b'b', ib, bmatch
241 193 elif equal_b and not equal_a:
242 194 yield b'a', ia, amatch
243 195 elif not equal_a and not equal_b:
244 196 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
245 197 else:
246 198 raise AssertionError(b"can't handle a=b=base but unmatched")
247 199
248 200 ia = amatch
249 201 ib = bmatch
250 202 iz = zmatch
251 203
252 204 # if the same part of the base was deleted on both sides
253 205 # that's OK, we can just skip it.
254 206
255 207 if matchlen > 0:
256 208 assert ia == amatch
257 209 assert ib == bmatch
258 210 assert iz == zmatch
259 211
260 212 yield b'unchanged', zmatch, zend
261 213 iz = zend
262 214 ia = aend
263 215 ib = bend
264 216
265 217 def minimize(self, merge_groups):
266 218 """Trim conflict regions of lines where A and B sides match.
267 219
268 220 Lines where both A and B have made the same changes at the beginning
269 221 or the end of each merge region are eliminated from the conflict
270 222 region and are instead considered the same.
271 223 """
272 224 for what, lines in merge_groups:
273 225 if what != b"conflict":
274 226 yield what, lines
275 227 continue
276 228 base_lines, a_lines, b_lines = lines
277 229 alen = len(a_lines)
278 230 blen = len(b_lines)
279 231
280 232 # find matches at the front
281 233 ii = 0
282 234 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
283 235 ii += 1
284 236 startmatches = ii
285 237
286 238 # find matches at the end
287 239 ii = 0
288 240 while (
289 241 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
290 242 ):
291 243 ii += 1
292 244 endmatches = ii
293 245
294 246 if startmatches > 0:
295 247 yield b'same', a_lines[:startmatches]
296 248
297 249 yield (
298 250 b'conflict',
299 251 (
300 252 base_lines,
301 253 a_lines[startmatches : alen - endmatches],
302 254 b_lines[startmatches : blen - endmatches],
303 255 ),
304 256 )
305 257
306 258 if endmatches > 0:
307 259 yield b'same', a_lines[alen - endmatches :]
308 260
309 261 def find_sync_regions(self):
310 262 """Return a list of sync regions, where both descendants match the base.
311 263
312 264 Generates a list of (base1, base2, a1, a2, b1, b2). There is
313 265 always a zero-length sync region at the end of all the files.
314 266 """
315 267
316 268 ia = ib = 0
317 269 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
318 270 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
319 271 len_a = len(amatches)
320 272 len_b = len(bmatches)
321 273
322 274 sl = []
323 275
324 276 while ia < len_a and ib < len_b:
325 277 abase, amatch, alen = amatches[ia]
326 278 bbase, bmatch, blen = bmatches[ib]
327 279
328 280 # there is an unconflicted block at i; how long does it
329 281 # extend? until whichever one ends earlier.
330 282 i = intersect((abase, abase + alen), (bbase, bbase + blen))
331 283 if i:
332 284 intbase = i[0]
333 285 intend = i[1]
334 286 intlen = intend - intbase
335 287
336 288 # found a match of base[i[0], i[1]]; this may be less than
337 289 # the region that matches in either one
338 290 assert intlen <= alen
339 291 assert intlen <= blen
340 292 assert abase <= intbase
341 293 assert bbase <= intbase
342 294
343 295 asub = amatch + (intbase - abase)
344 296 bsub = bmatch + (intbase - bbase)
345 297 aend = asub + intlen
346 298 bend = bsub + intlen
347 299
348 300 assert self.base[intbase:intend] == self.a[asub:aend], (
349 301 self.base[intbase:intend],
350 302 self.a[asub:aend],
351 303 )
352 304
353 305 assert self.base[intbase:intend] == self.b[bsub:bend]
354 306
355 307 sl.append((intbase, intend, asub, aend, bsub, bend))
356 308
357 309 # advance whichever one ends first in the base text
358 310 if (abase + alen) < (bbase + blen):
359 311 ia += 1
360 312 else:
361 313 ib += 1
362 314
363 315 intbase = len(self.base)
364 316 abase = len(self.a)
365 317 bbase = len(self.b)
366 318 sl.append((intbase, intbase, abase, abase, bbase, bbase))
367 319
368 320 return sl
369 321
370 322
371 323 def _verifytext(text, path, ui, opts):
372 324 """verifies that text is non-binary (unless opts[text] is passed,
373 325 then we just warn)"""
374 326 if stringutil.binary(text):
375 327 msg = _(b"%s looks like a binary file.") % path
376 328 if not opts.get('quiet'):
377 329 ui.warn(_(b'warning: %s\n') % msg)
378 330 if not opts.get('text'):
379 331 raise error.Abort(msg)
380 332 return text
381 333
382 334
383 335 def _picklabels(defaults, overrides):
384 336 if len(overrides) > 3:
385 337 raise error.Abort(_(b"can only specify three labels."))
386 338 result = defaults[:]
387 339 for i, override in enumerate(overrides):
388 340 result[i] = override
389 341 return result
390 342
391 343
344 def merge_lines(
345 m3,
346 name_a=None,
347 name_b=None,
348 name_base=None,
349 start_marker=b'<<<<<<<',
350 mid_marker=b'=======',
351 end_marker=b'>>>>>>>',
352 base_marker=None,
353 minimize=False,
354 ):
355 """Return merge in cvs-like form."""
356 conflicts = False
357 newline = b'\n'
358 if len(m3.a) > 0:
359 if m3.a[0].endswith(b'\r\n'):
360 newline = b'\r\n'
361 elif m3.a[0].endswith(b'\r'):
362 newline = b'\r'
363 if name_a and start_marker:
364 start_marker = start_marker + b' ' + name_a
365 if name_b and end_marker:
366 end_marker = end_marker + b' ' + name_b
367 if name_base and base_marker:
368 base_marker = base_marker + b' ' + name_base
369 merge_groups = m3.merge_groups()
370 if minimize:
371 merge_groups = m3.minimize(merge_groups)
372 lines = []
373 for what, group_lines in merge_groups:
374 if what == b'conflict':
375 base_lines, a_lines, b_lines = group_lines
376 conflicts = True
377 if start_marker is not None:
378 lines.append(start_marker + newline)
379 lines.extend(a_lines)
380 if base_marker is not None:
381 lines.append(base_marker + newline)
382 lines.extend(base_lines)
383 if mid_marker is not None:
384 lines.append(mid_marker + newline)
385 lines.extend(b_lines)
386 if end_marker is not None:
387 lines.append(end_marker + newline)
388 else:
389 lines.extend(group_lines)
390 return lines, conflicts
391
392
392 393 def _mergediff(m3, name_a, name_b, name_base):
393 394 lines = []
394 395 conflicts = False
395 396 for what, group_lines in m3.merge_groups():
396 397 if what == b'conflict':
397 398 base_lines, a_lines, b_lines = group_lines
398 399 base_text = b''.join(base_lines)
399 400 b_blocks = list(
400 401 mdiff.allblocks(
401 402 base_text,
402 403 b''.join(b_lines),
403 404 lines1=base_lines,
404 405 lines2=b_lines,
405 406 )
406 407 )
407 408 a_blocks = list(
408 409 mdiff.allblocks(
409 410 base_text,
410 411 b''.join(a_lines),
411 412 lines1=base_lines,
412 413 lines2=b_lines,
413 414 )
414 415 )
415 416
416 417 def matching_lines(blocks):
417 418 return sum(
418 419 block[1] - block[0]
419 420 for block, kind in blocks
420 421 if kind == b'='
421 422 )
422 423
423 424 def diff_lines(blocks, lines1, lines2):
424 425 for block, kind in blocks:
425 426 if kind == b'=':
426 427 for line in lines1[block[0] : block[1]]:
427 428 yield b' ' + line
428 429 else:
429 430 for line in lines1[block[0] : block[1]]:
430 431 yield b'-' + line
431 432 for line in lines2[block[2] : block[3]]:
432 433 yield b'+' + line
433 434
434 435 lines.append(b"<<<<<<<\n")
435 436 if matching_lines(a_blocks) < matching_lines(b_blocks):
436 437 lines.append(b"======= %s\n" % name_a)
437 438 lines.extend(a_lines)
438 439 lines.append(b"------- %s\n" % name_base)
439 440 lines.append(b"+++++++ %s\n" % name_b)
440 441 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
441 442 else:
442 443 lines.append(b"------- %s\n" % name_base)
443 444 lines.append(b"+++++++ %s\n" % name_a)
444 445 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
445 446 lines.append(b"======= %s\n" % name_b)
446 447 lines.extend(b_lines)
447 448 lines.append(b">>>>>>>\n")
448 449 conflicts = True
449 450 else:
450 451 lines.extend(group_lines)
451 452 return lines, conflicts
452 453
453 454
454 455 def _resolve(m3, sides):
455 456 lines = []
456 457 for what, group_lines in m3.merge_groups():
457 458 if what == b'conflict':
458 459 for side in sides:
459 460 lines.extend(group_lines[side])
460 461 else:
461 462 lines.extend(group_lines)
462 463 return lines
463 464
464 465
465 466 def simplemerge(ui, localctx, basectx, otherctx, **opts):
466 467 """Performs the simplemerge algorithm.
467 468
468 469 The merged result is written into `localctx`.
469 470 """
470 471
471 472 def readctx(ctx):
472 473 # Merges were always run in the working copy before, which means
473 474 # they used decoded data, if the user defined any repository
474 475 # filters.
475 476 #
476 477 # Maintain that behavior today for BC, though perhaps in the future
477 478 # it'd be worth considering whether merging encoded data (what the
478 479 # repository usually sees) might be more useful.
479 480 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
480 481
481 482 try:
482 483 localtext = readctx(localctx)
483 484 basetext = readctx(basectx)
484 485 othertext = readctx(otherctx)
485 486 except error.Abort:
486 487 return 1
487 488
488 489 m3 = Merge3Text(basetext, localtext, othertext)
489 490 conflicts = False
490 491 mode = opts.get('mode', b'merge')
491 492 if mode == b'union':
492 493 lines = _resolve(m3, (1, 2))
493 494 elif mode == b'local':
494 495 lines = _resolve(m3, (1,))
495 496 elif mode == b'other':
496 497 lines = _resolve(m3, (2,))
497 498 else:
498 499 name_a, name_b, name_base = _picklabels(
499 500 [localctx.path(), otherctx.path(), None], opts.get('label', [])
500 501 )
501 502 if mode == b'mergediff':
502 503 lines, conflicts = _mergediff(m3, name_a, name_b, name_base)
503 504 else:
504 505 extrakwargs = {
505 506 'minimize': True,
506 507 }
507 508 if mode == b'merge3':
508 509 extrakwargs['base_marker'] = b'|||||||'
509 510 extrakwargs['name_base'] = name_base
510 511 extrakwargs['minimize'] = False
511 lines, conflicts = m3.merge_lines(
512 name_a=name_a, name_b=name_b, **extrakwargs
512 lines, conflicts = merge_lines(
513 m3, name_a=name_a, name_b=name_b, **extrakwargs
513 514 )
514 515
515 516 mergedtext = b''.join(lines)
516 517 if opts.get('print'):
517 518 ui.fout.write(mergedtext)
518 519 else:
519 520 # localctx.flags() already has the merged flags (done in
520 521 # mergestate.resolve())
521 522 localctx.write(mergedtext, localctx.flags())
522 523
523 524 if conflicts:
524 525 return 1
@@ -1,386 +1,395 b''
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15 15
16 16 from __future__ import absolute_import
17 17
18 18 import unittest
19 19 from mercurial import (
20 20 error,
21 21 simplemerge,
22 22 util,
23 23 )
24 24
25 25 from mercurial.utils import stringutil
26 26
27 27 TestCase = unittest.TestCase
28 28 # bzr compatible interface, for the tests
29 29 class Merge3(simplemerge.Merge3Text):
30 30 """3-way merge of texts.
31 31
32 32 Given BASE, OTHER, THIS, tries to produce a combined text
33 33 incorporating the changes from both BASE->OTHER and BASE->THIS.
34 34 All three will typically be sequences of lines."""
35 35
36 36 def __init__(self, base, a, b):
37 37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
38 38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
39 39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
40 40 if (
41 41 stringutil.binary(basetext)
42 42 or stringutil.binary(atext)
43 43 or stringutil.binary(btext)
44 44 ):
45 45 raise error.Abort(b"don't know how to merge binary files")
46 46 simplemerge.Merge3Text.__init__(
47 47 self, basetext, atext, btext, base, a, b
48 48 )
49 49
50 50
51 51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
52 52
53 53
54 54 def split_lines(t):
55 55 return util.stringio(t).readlines()
56 56
57 57
58 58 ############################################################
59 59 # test case data from the gnu diffutils manual
60 60 # common base
61 61 TZU = split_lines(
62 62 b""" The Nameless is the origin of Heaven and Earth;
63 63 The named is the mother of all things.
64 64
65 65 Therefore let there always be non-being,
66 66 so we may see their subtlety,
67 67 And let there always be being,
68 68 so we may see their outcome.
69 69 The two are the same,
70 70 But after they are produced,
71 71 they have different names.
72 72 They both may be called deep and profound.
73 73 Deeper and more profound,
74 74 The door of all subtleties!
75 75 """
76 76 )
77 77
78 78 LAO = split_lines(
79 79 b""" The Way that can be told of is not the eternal Way;
80 80 The name that can be named is not the eternal name.
81 81 The Nameless is the origin of Heaven and Earth;
82 82 The Named is the mother of all things.
83 83 Therefore let there always be non-being,
84 84 so we may see their subtlety,
85 85 And let there always be being,
86 86 so we may see their outcome.
87 87 The two are the same,
88 88 But after they are produced,
89 89 they have different names.
90 90 """
91 91 )
92 92
93 93
94 94 TAO = split_lines(
95 95 b""" The Way that can be told of is not the eternal Way;
96 96 The name that can be named is not the eternal name.
97 97 The Nameless is the origin of Heaven and Earth;
98 98 The named is the mother of all things.
99 99
100 100 Therefore let there always be non-being,
101 101 so we may see their subtlety,
102 102 And let there always be being,
103 103 so we may see their result.
104 104 The two are the same,
105 105 But after they are produced,
106 106 they have different names.
107 107
108 108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
109 109
110 110 """
111 111 )
112 112
113 113 MERGED_RESULT = split_lines(
114 114 b"""\
115 115 The Way that can be told of is not the eternal Way;
116 116 The name that can be named is not the eternal name.
117 117 The Nameless is the origin of Heaven and Earth;
118 118 The Named is the mother of all things.
119 119 Therefore let there always be non-being,
120 120 so we may see their subtlety,
121 121 And let there always be being,
122 122 so we may see their result.
123 123 The two are the same,
124 124 But after they are produced,
125 125 they have different names.\
126 126 \n<<<<<<< LAO\
127 127 \n=======
128 128
129 129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
130 130 \
131 131 \n>>>>>>> TAO
132 132 """
133 133 )
134 134
135 135
136 136 class TestMerge3(TestCase):
137 137 def log(self, msg):
138 138 pass
139 139
140 140 def test_no_changes(self):
141 141 """No conflicts because nothing changed"""
142 142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
143 143
144 144 self.assertEqual(
145 145 list(m3.find_sync_regions()),
146 146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
147 147 )
148 148
149 149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
150 150
151 151 self.assertEqual(
152 152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
153 153 )
154 154
155 155 def test_front_insert(self):
156 156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
157 157
158 158 # todo: should use a sentinel at end as from get_matching_blocks
159 159 # to match without zz
160 160 self.assertEqual(
161 161 list(m3.find_sync_regions()),
162 162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
163 163 )
164 164
165 165 self.assertEqual(
166 166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
167 167 )
168 168
169 169 self.assertEqual(
170 170 list(m3.merge_groups()),
171 171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
172 172 )
173 173
174 174 def test_null_insert(self):
175 175 m3 = Merge3([], [b'aaa', b'bbb'], [])
176 176 # todo: should use a sentinel at end as from get_matching_blocks
177 177 # to match without zz
178 178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
179 179
180 180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
181 181
182 self.assertEqual(m3.merge_lines(), ([b'aaa', b'bbb'], False))
182 self.assertEqual(simplemerge.merge_lines(m3), ([b'aaa', b'bbb'], False))
183 183
184 184 def test_no_conflicts(self):
185 185 """No conflicts because only one side changed"""
186 186 m3 = Merge3(
187 187 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
188 188 )
189 189
190 190 self.assertEqual(
191 191 list(m3.find_sync_regions()),
192 192 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
193 193 )
194 194
195 195 self.assertEqual(
196 196 list(m3.merge_regions()),
197 197 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
198 198 )
199 199
200 200 def test_append_a(self):
201 201 m3 = Merge3(
202 202 [b'aaa\n', b'bbb\n'],
203 203 [b'aaa\n', b'bbb\n', b'222\n'],
204 204 [b'aaa\n', b'bbb\n'],
205 205 )
206 206
207 self.assertEqual(b''.join(m3.merge_lines()[0]), b'aaa\nbbb\n222\n')
207 self.assertEqual(
208 b''.join(simplemerge.merge_lines(m3)[0]), b'aaa\nbbb\n222\n'
209 )
208 210
209 211 def test_append_b(self):
210 212 m3 = Merge3(
211 213 [b'aaa\n', b'bbb\n'],
212 214 [b'aaa\n', b'bbb\n'],
213 215 [b'aaa\n', b'bbb\n', b'222\n'],
214 216 )
215 217
216 self.assertEqual(b''.join(m3.merge_lines()[0]), b'aaa\nbbb\n222\n')
218 self.assertEqual(
219 b''.join(simplemerge.merge_lines(m3)[0]), b'aaa\nbbb\n222\n'
220 )
217 221
218 222 def test_append_agreement(self):
219 223 m3 = Merge3(
220 224 [b'aaa\n', b'bbb\n'],
221 225 [b'aaa\n', b'bbb\n', b'222\n'],
222 226 [b'aaa\n', b'bbb\n', b'222\n'],
223 227 )
224 228
225 self.assertEqual(b''.join(m3.merge_lines()[0]), b'aaa\nbbb\n222\n')
229 self.assertEqual(
230 b''.join(simplemerge.merge_lines(m3)[0]), b'aaa\nbbb\n222\n'
231 )
226 232
227 233 def test_append_clash(self):
228 234 m3 = Merge3(
229 235 [b'aaa\n', b'bbb\n'],
230 236 [b'aaa\n', b'bbb\n', b'222\n'],
231 237 [b'aaa\n', b'bbb\n', b'333\n'],
232 238 )
233 239
234 ml, conflicts = m3.merge_lines(
240 ml, conflicts = simplemerge.merge_lines(
241 m3,
235 242 name_a=b'a',
236 243 name_b=b'b',
237 244 start_marker=b'<<',
238 245 mid_marker=b'--',
239 246 end_marker=b'>>',
240 247 )
241 248 self.assertEqual(
242 249 b''.join(ml),
243 250 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
244 251 )
245 252
246 253 def test_insert_agreement(self):
247 254 m3 = Merge3(
248 255 [b'aaa\n', b'bbb\n'],
249 256 [b'aaa\n', b'222\n', b'bbb\n'],
250 257 [b'aaa\n', b'222\n', b'bbb\n'],
251 258 )
252 259
253 ml, conflicts = m3.merge_lines(
260 ml, conflicts = simplemerge.merge_lines(
261 m3,
254 262 name_a=b'a',
255 263 name_b=b'b',
256 264 start_marker=b'<<',
257 265 mid_marker=b'--',
258 266 end_marker=b'>>',
259 267 )
260 268 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
261 269
262 270 def test_insert_clash(self):
263 271 """Both try to insert lines in the same place."""
264 272 m3 = Merge3(
265 273 [b'aaa\n', b'bbb\n'],
266 274 [b'aaa\n', b'111\n', b'bbb\n'],
267 275 [b'aaa\n', b'222\n', b'bbb\n'],
268 276 )
269 277
270 278 self.assertEqual(
271 279 list(m3.find_sync_regions()),
272 280 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
273 281 )
274 282
275 283 self.assertEqual(
276 284 list(m3.merge_regions()),
277 285 [
278 286 (b'unchanged', 0, 1),
279 287 (b'conflict', 1, 1, 1, 2, 1, 2),
280 288 (b'unchanged', 1, 2),
281 289 ],
282 290 )
283 291
284 292 self.assertEqual(
285 293 list(m3.merge_groups()),
286 294 [
287 295 (b'unchanged', [b'aaa\n']),
288 296 (b'conflict', ([], [b'111\n'], [b'222\n'])),
289 297 (b'unchanged', [b'bbb\n']),
290 298 ],
291 299 )
292 300
293 ml, conflicts = m3.merge_lines(
301 ml, conflicts = simplemerge.merge_lines(
302 m3,
294 303 name_a=b'a',
295 304 name_b=b'b',
296 305 start_marker=b'<<',
297 306 mid_marker=b'--',
298 307 end_marker=b'>>',
299 308 )
300 309 self.assertEqual(
301 310 b''.join(ml),
302 311 b'''aaa
303 312 << a
304 313 111
305 314 --
306 315 222
307 316 >> b
308 317 bbb
309 318 ''',
310 319 )
311 320
312 321 def test_replace_clash(self):
313 322 """Both try to insert lines in the same place."""
314 323 m3 = Merge3(
315 324 [b'aaa', b'000', b'bbb'],
316 325 [b'aaa', b'111', b'bbb'],
317 326 [b'aaa', b'222', b'bbb'],
318 327 )
319 328
320 329 self.assertEqual(
321 330 list(m3.find_sync_regions()),
322 331 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
323 332 )
324 333
325 334 def test_replace_multi(self):
326 335 """Replacement with regions of different size."""
327 336 m3 = Merge3(
328 337 [b'aaa', b'000', b'000', b'bbb'],
329 338 [b'aaa', b'111', b'111', b'111', b'bbb'],
330 339 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
331 340 )
332 341
333 342 self.assertEqual(
334 343 list(m3.find_sync_regions()),
335 344 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
336 345 )
337 346
338 347 def test_merge_poem(self):
339 348 """Test case from diff3 manual"""
340 349 m3 = Merge3(TZU, LAO, TAO)
341 ml, conflicts = m3.merge_lines(b'LAO', b'TAO')
350 ml, conflicts = simplemerge.merge_lines(m3, b'LAO', b'TAO')
342 351 self.log(b'merge result:')
343 352 self.log(b''.join(ml))
344 353 self.assertEqual(ml, MERGED_RESULT)
345 354
346 355 def test_binary(self):
347 356 with self.assertRaises(error.Abort):
348 357 Merge3([b'\x00'], [b'a'], [b'b'])
349 358
350 359 def test_dos_text(self):
351 360 base_text = b'a\r\n'
352 361 this_text = b'b\r\n'
353 362 other_text = b'c\r\n'
354 363 m3 = Merge3(
355 364 base_text.splitlines(True),
356 365 other_text.splitlines(True),
357 366 this_text.splitlines(True),
358 367 )
359 m_lines, conflicts = m3.merge_lines(b'OTHER', b'THIS')
368 m_lines, conflicts = simplemerge.merge_lines(m3, b'OTHER', b'THIS')
360 369 self.assertEqual(
361 370 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
362 371 b'>>>>>>> THIS\r\n'.splitlines(True),
363 372 m_lines,
364 373 )
365 374
366 375 def test_mac_text(self):
367 376 base_text = b'a\r'
368 377 this_text = b'b\r'
369 378 other_text = b'c\r'
370 379 m3 = Merge3(
371 380 base_text.splitlines(True),
372 381 other_text.splitlines(True),
373 382 this_text.splitlines(True),
374 383 )
375 m_lines, conflicts = m3.merge_lines(b'OTHER', b'THIS')
384 m_lines, conflicts = simplemerge.merge_lines(m3, b'OTHER', b'THIS')
376 385 self.assertEqual(
377 386 b'<<<<<<< OTHER\rc\r=======\rb\r'
378 387 b'>>>>>>> THIS\r'.splitlines(True),
379 388 m_lines,
380 389 )
381 390
382 391
if __name__ == '__main__':
    # Run this module's tests through Mercurial's quiet test runner.
    import silenttestrunner

    silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now