debugmergestate: show extras for files which are not in mergestate...
Pulkit Goyal
r46016:766797f2 default
@@ -1,4529 +1,4542 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 obsolete,
63 63 obsutil,
64 64 pathutil,
65 65 phases,
66 66 policy,
67 67 pvec,
68 68 pycompat,
69 69 registrar,
70 70 repair,
71 71 revlog,
72 72 revset,
73 73 revsetlang,
74 74 scmutil,
75 75 setdiscovery,
76 76 simplemerge,
77 77 sshpeer,
78 78 sslutil,
79 79 streamclone,
80 80 tags as tagsmod,
81 81 templater,
82 82 treediscovery,
83 83 upgrade,
84 84 url as urlmod,
85 85 util,
86 86 vfs as vfsmod,
87 87 wireprotoframing,
88 88 wireprotoserver,
89 89 wireprotov2peer,
90 90 )
91 91 from .utils import (
92 92 cborutil,
93 93 compression,
94 94 dateutil,
95 95 procutil,
96 96 stringutil,
97 97 )
98 98
99 99 from .revlogutils import (
100 100 deltas as deltautil,
101 101 nodemap,
102 102 )
103 103
104 104 release = lockmod.release
105 105
106 106 command = registrar.command()
107 107
108 108
109 109 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
110 110 def debugancestor(ui, repo, *args):
111 111 """find the ancestor revision of two revisions in a given index"""
112 112 if len(args) == 3:
113 113 index, rev1, rev2 = args
114 114 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
115 115 lookup = r.lookup
116 116 elif len(args) == 2:
117 117 if not repo:
118 118 raise error.Abort(
119 119 _(b'there is no Mercurial repository here (.hg not found)')
120 120 )
121 121 rev1, rev2 = args
122 122 r = repo.changelog
123 123 lookup = repo.lookup
124 124 else:
125 125 raise error.Abort(_(b'either two or three arguments required'))
126 126 a = r.ancestor(lookup(rev1), lookup(rev2))
127 127 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
128 128
129 129
130 130 @command(b'debugantivirusrunning', [])
131 131 def debugantivirusrunning(ui, repo):
132 132 """attempt to trigger an antivirus scanner to see if one is active"""
133 133 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
134 134 f.write(
135 135 util.b85decode(
136 136 # This is a base85-armored version of the EICAR test file. See
137 137 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
138 138 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
139 139 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
140 140 )
141 141 )
142 142 # Give an AV engine time to scan the file.
143 143 time.sleep(2)
144 144 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
145 145
146 146
147 147 @command(b'debugapplystreamclonebundle', [], b'FILE')
148 148 def debugapplystreamclonebundle(ui, repo, fname):
149 149 """apply a stream clone bundle file"""
150 150 f = hg.openpath(ui, fname)
151 151 gen = exchange.readbundle(ui, f, fname)
152 152 gen.apply(repo)
153 153
154 154
155 155 @command(
156 156 b'debugbuilddag',
157 157 [
158 158 (
159 159 b'm',
160 160 b'mergeable-file',
161 161 None,
162 162 _(b'add single file mergeable changes'),
163 163 ),
164 164 (
165 165 b'o',
166 166 b'overwritten-file',
167 167 None,
168 168 _(b'add single file all revs overwrite'),
169 169 ),
170 170 (b'n', b'new-file', None, _(b'add new file at each rev')),
171 171 ],
172 172 _(b'[OPTION]... [TEXT]'),
173 173 )
174 174 def debugbuilddag(
175 175 ui,
176 176 repo,
177 177 text=None,
178 178 mergeable_file=False,
179 179 overwritten_file=False,
180 180 new_file=False,
181 181 ):
182 182 """builds a repo with a given DAG from scratch in the current empty repo
183 183
184 184 The description of the DAG is read from stdin if not given on the
185 185 command line.
186 186
187 187 Elements:
188 188
189 189 - "+n" is a linear run of n nodes based on the current default parent
190 190 - "." is a single node based on the current default parent
191 191 - "$" resets the default parent to null (implied at the start);
192 192 otherwise the default parent is always the last node created
193 193 - "<p" sets the default parent to the backref p
194 194 - "*p" is a fork at parent p, which is a backref
195 195 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
196 196 - "/p2" is a merge of the preceding node and p2
197 197 - ":tag" defines a local tag for the preceding node
198 198 - "@branch" sets the named branch for subsequent nodes
199 199 - "#...\\n" is a comment up to the end of the line
200 200
201 201 Whitespace between the above elements is ignored.
202 202
203 203 A backref is either
204 204
205 205 - a number n, which references the node curr-n, where curr is the current
206 206 node, or
207 207 - the name of a local tag you placed earlier using ":tag", or
208 208 - empty to denote the default parent.
209 209
210 210 All string-valued elements are either strictly alphanumeric, or must
211 211 be enclosed in double quotes ("..."), with "\\" as escape character.
212 212 """
213 213
214 214 if text is None:
215 215 ui.status(_(b"reading DAG from stdin\n"))
216 216 text = ui.fin.read()
217 217
218 218 cl = repo.changelog
219 219 if len(cl) > 0:
220 220 raise error.Abort(_(b'repository is not empty'))
221 221
222 222 # determine number of revs in DAG
223 223 total = 0
224 224 for type, data in dagparser.parsedag(text):
225 225 if type == b'n':
226 226 total += 1
227 227
228 228 if mergeable_file:
229 229 linesperrev = 2
230 230 # make a file with k lines per rev
231 231 initialmergedlines = [
232 232 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
233 233 ]
234 234 initialmergedlines.append(b"")
235 235
236 236 tags = []
237 237 progress = ui.makeprogress(
238 238 _(b'building'), unit=_(b'revisions'), total=total
239 239 )
240 240 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
241 241 at = -1
242 242 atbranch = b'default'
243 243 nodeids = []
244 244 id = 0
245 245 progress.update(id)
246 246 for type, data in dagparser.parsedag(text):
247 247 if type == b'n':
248 248 ui.note((b'node %s\n' % pycompat.bytestr(data)))
249 249 id, ps = data
250 250
251 251 files = []
252 252 filecontent = {}
253 253
254 254 p2 = None
255 255 if mergeable_file:
256 256 fn = b"mf"
257 257 p1 = repo[ps[0]]
258 258 if len(ps) > 1:
259 259 p2 = repo[ps[1]]
260 260 pa = p1.ancestor(p2)
261 261 base, local, other = [
262 262 x[fn].data() for x in (pa, p1, p2)
263 263 ]
264 264 m3 = simplemerge.Merge3Text(base, local, other)
265 265 ml = [l.strip() for l in m3.merge_lines()]
266 266 ml.append(b"")
267 267 elif at > 0:
268 268 ml = p1[fn].data().split(b"\n")
269 269 else:
270 270 ml = initialmergedlines
271 271 ml[id * linesperrev] += b" r%i" % id
272 272 mergedtext = b"\n".join(ml)
273 273 files.append(fn)
274 274 filecontent[fn] = mergedtext
275 275
276 276 if overwritten_file:
277 277 fn = b"of"
278 278 files.append(fn)
279 279 filecontent[fn] = b"r%i\n" % id
280 280
281 281 if new_file:
282 282 fn = b"nf%i" % id
283 283 files.append(fn)
284 284 filecontent[fn] = b"r%i\n" % id
285 285 if len(ps) > 1:
286 286 if not p2:
287 287 p2 = repo[ps[1]]
288 288 for fn in p2:
289 289 if fn.startswith(b"nf"):
290 290 files.append(fn)
291 291 filecontent[fn] = p2[fn].data()
292 292
293 293 def fctxfn(repo, cx, path):
294 294 if path in filecontent:
295 295 return context.memfilectx(
296 296 repo, cx, path, filecontent[path]
297 297 )
298 298 return None
299 299
300 300 if len(ps) == 0 or ps[0] < 0:
301 301 pars = [None, None]
302 302 elif len(ps) == 1:
303 303 pars = [nodeids[ps[0]], None]
304 304 else:
305 305 pars = [nodeids[p] for p in ps]
306 306 cx = context.memctx(
307 307 repo,
308 308 pars,
309 309 b"r%i" % id,
310 310 files,
311 311 fctxfn,
312 312 date=(id, 0),
313 313 user=b"debugbuilddag",
314 314 extra={b'branch': atbranch},
315 315 )
316 316 nodeid = repo.commitctx(cx)
317 317 nodeids.append(nodeid)
318 318 at = id
319 319 elif type == b'l':
320 320 id, name = data
321 321 ui.note((b'tag %s\n' % name))
322 322 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
323 323 elif type == b'a':
324 324 ui.note((b'branch %s\n' % data))
325 325 atbranch = data
326 326 progress.update(id)
327 327
328 328 if tags:
329 329 repo.vfs.write(b"localtags", b"".join(tags))
330 330
331 331
332 332 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
333 333 indent_string = b' ' * indent
334 334 if all:
335 335 ui.writenoi18n(
336 336 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
337 337 % indent_string
338 338 )
339 339
340 340 def showchunks(named):
341 341 ui.write(b"\n%s%s\n" % (indent_string, named))
342 342 for deltadata in gen.deltaiter():
343 343 node, p1, p2, cs, deltabase, delta, flags = deltadata
344 344 ui.write(
345 345 b"%s%s %s %s %s %s %d\n"
346 346 % (
347 347 indent_string,
348 348 hex(node),
349 349 hex(p1),
350 350 hex(p2),
351 351 hex(cs),
352 352 hex(deltabase),
353 353 len(delta),
354 354 )
355 355 )
356 356
357 357 gen.changelogheader()
358 358 showchunks(b"changelog")
359 359 gen.manifestheader()
360 360 showchunks(b"manifest")
361 361 for chunkdata in iter(gen.filelogheader, {}):
362 362 fname = chunkdata[b'filename']
363 363 showchunks(fname)
364 364 else:
365 365 if isinstance(gen, bundle2.unbundle20):
366 366 raise error.Abort(_(b'use debugbundle2 for this file'))
367 367 gen.changelogheader()
368 368 for deltadata in gen.deltaiter():
369 369 node, p1, p2, cs, deltabase, delta, flags = deltadata
370 370 ui.write(b"%s%s\n" % (indent_string, hex(node)))
371 371
372 372
373 373 def _debugobsmarkers(ui, part, indent=0, **opts):
374 374 """display version and markers contained in 'data'"""
375 375 opts = pycompat.byteskwargs(opts)
376 376 data = part.read()
377 377 indent_string = b' ' * indent
378 378 try:
379 379 version, markers = obsolete._readmarkers(data)
380 380 except error.UnknownVersion as exc:
381 381 msg = b"%sunsupported version: %s (%d bytes)\n"
382 382 msg %= indent_string, exc.version, len(data)
383 383 ui.write(msg)
384 384 else:
385 385 msg = b"%sversion: %d (%d bytes)\n"
386 386 msg %= indent_string, version, len(data)
387 387 ui.write(msg)
388 388 fm = ui.formatter(b'debugobsolete', opts)
389 389 for rawmarker in sorted(markers):
390 390 m = obsutil.marker(None, rawmarker)
391 391 fm.startitem()
392 392 fm.plain(indent_string)
393 393 cmdutil.showmarker(fm, m)
394 394 fm.end()
395 395
396 396
397 397 def _debugphaseheads(ui, data, indent=0):
398 398 """display version and markers contained in 'data'"""
399 399 indent_string = b' ' * indent
400 400 headsbyphase = phases.binarydecode(data)
401 401 for phase in phases.allphases:
402 402 for head in headsbyphase[phase]:
403 403 ui.write(indent_string)
404 404 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
405 405
406 406
407 407 def _quasirepr(thing):
408 408 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
409 409 return b'{%s}' % (
410 410 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
411 411 )
412 412 return pycompat.bytestr(repr(thing))
413 413
414 414
415 415 def _debugbundle2(ui, gen, all=None, **opts):
416 416 """lists the contents of a bundle2"""
417 417 if not isinstance(gen, bundle2.unbundle20):
418 418 raise error.Abort(_(b'not a bundle2 file'))
419 419 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
420 420 parttypes = opts.get('part_type', [])
421 421 for part in gen.iterparts():
422 422 if parttypes and part.type not in parttypes:
423 423 continue
424 424 msg = b'%s -- %s (mandatory: %r)\n'
425 425 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
426 426 if part.type == b'changegroup':
427 427 version = part.params.get(b'version', b'01')
428 428 cg = changegroup.getunbundler(version, part, b'UN')
429 429 if not ui.quiet:
430 430 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
431 431 if part.type == b'obsmarkers':
432 432 if not ui.quiet:
433 433 _debugobsmarkers(ui, part, indent=4, **opts)
434 434 if part.type == b'phase-heads':
435 435 if not ui.quiet:
436 436 _debugphaseheads(ui, part, indent=4)
437 437
438 438
439 439 @command(
440 440 b'debugbundle',
441 441 [
442 442 (b'a', b'all', None, _(b'show all details')),
443 443 (b'', b'part-type', [], _(b'show only the named part type')),
444 444 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
445 445 ],
446 446 _(b'FILE'),
447 447 norepo=True,
448 448 )
449 449 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
450 450 """lists the contents of a bundle"""
451 451 with hg.openpath(ui, bundlepath) as f:
452 452 if spec:
453 453 spec = exchange.getbundlespec(ui, f)
454 454 ui.write(b'%s\n' % spec)
455 455 return
456 456
457 457 gen = exchange.readbundle(ui, f, bundlepath)
458 458 if isinstance(gen, bundle2.unbundle20):
459 459 return _debugbundle2(ui, gen, all=all, **opts)
460 460 _debugchangegroup(ui, gen, all=all, **opts)
461 461
462 462
463 463 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
464 464 def debugcapabilities(ui, path, **opts):
465 465 """lists the capabilities of a remote peer"""
466 466 opts = pycompat.byteskwargs(opts)
467 467 peer = hg.peer(ui, opts, path)
468 468 caps = peer.capabilities()
469 469 ui.writenoi18n(b'Main capabilities:\n')
470 470 for c in sorted(caps):
471 471 ui.write(b' %s\n' % c)
472 472 b2caps = bundle2.bundle2caps(peer)
473 473 if b2caps:
474 474 ui.writenoi18n(b'Bundle2 capabilities:\n')
475 475 for key, values in sorted(pycompat.iteritems(b2caps)):
476 476 ui.write(b' %s\n' % key)
477 477 for v in values:
478 478 ui.write(b' %s\n' % v)
479 479
480 480
481 481 @command(b'debugcheckstate', [], b'')
482 482 def debugcheckstate(ui, repo):
483 483 """validate the correctness of the current dirstate"""
484 484 parent1, parent2 = repo.dirstate.parents()
485 485 m1 = repo[parent1].manifest()
486 486 m2 = repo[parent2].manifest()
487 487 errors = 0
488 488 for f in repo.dirstate:
489 489 state = repo.dirstate[f]
490 490 if state in b"nr" and f not in m1:
491 491 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
492 492 errors += 1
493 493 if state in b"a" and f in m1:
494 494 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
495 495 errors += 1
496 496 if state in b"m" and f not in m1 and f not in m2:
497 497 ui.warn(
498 498 _(b"%s in state %s, but not in either manifest\n") % (f, state)
499 499 )
500 500 errors += 1
501 501 for f in m1:
502 502 state = repo.dirstate[f]
503 503 if state not in b"nrm":
504 504 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
505 505 errors += 1
506 506 if errors:
507 507 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
508 508 raise error.Abort(errstr)
509 509
510 510
511 511 @command(
512 512 b'debugcolor',
513 513 [(b'', b'style', None, _(b'show all configured styles'))],
514 514 b'hg debugcolor',
515 515 )
516 516 def debugcolor(ui, repo, **opts):
517 517 """show available color, effects or style"""
518 518 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
519 519 if opts.get('style'):
520 520 return _debugdisplaystyle(ui)
521 521 else:
522 522 return _debugdisplaycolor(ui)
523 523
524 524
525 525 def _debugdisplaycolor(ui):
526 526 ui = ui.copy()
527 527 ui._styles.clear()
528 528 for effect in color._activeeffects(ui).keys():
529 529 ui._styles[effect] = effect
530 530 if ui._terminfoparams:
531 531 for k, v in ui.configitems(b'color'):
532 532 if k.startswith(b'color.'):
533 533 ui._styles[k] = k[6:]
534 534 elif k.startswith(b'terminfo.'):
535 535 ui._styles[k] = k[9:]
536 536 ui.write(_(b'available colors:\n'))
537 537 # sort label with a '_' after the other to group '_background' entry.
538 538 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
539 539 for colorname, label in items:
540 540 ui.write(b'%s\n' % colorname, label=label)
541 541
542 542
543 543 def _debugdisplaystyle(ui):
544 544 ui.write(_(b'available style:\n'))
545 545 if not ui._styles:
546 546 return
547 547 width = max(len(s) for s in ui._styles)
548 548 for label, effects in sorted(ui._styles.items()):
549 549 ui.write(b'%s' % label, label=label)
550 550 if effects:
551 551 # 50
552 552 ui.write(b': ')
553 553 ui.write(b' ' * (max(0, width - len(label))))
554 554 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
555 555 ui.write(b'\n')
556 556
557 557
558 558 @command(b'debugcreatestreamclonebundle', [], b'FILE')
559 559 def debugcreatestreamclonebundle(ui, repo, fname):
560 560 """create a stream clone bundle file
561 561
562 562 Stream bundles are special bundles that are essentially archives of
563 563 revlog files. They are commonly used for cloning very quickly.
564 564 """
565 565 # TODO we may want to turn this into an abort when this functionality
566 566 # is moved into `hg bundle`.
567 567 if phases.hassecret(repo):
568 568 ui.warn(
569 569 _(
570 570 b'(warning: stream clone bundle will contain secret '
571 571 b'revisions)\n'
572 572 )
573 573 )
574 574
575 575 requirements, gen = streamclone.generatebundlev1(repo)
576 576 changegroup.writechunks(ui, gen, fname)
577 577
578 578 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
579 579
580 580
581 581 @command(
582 582 b'debugdag',
583 583 [
584 584 (b't', b'tags', None, _(b'use tags as labels')),
585 585 (b'b', b'branches', None, _(b'annotate with branch names')),
586 586 (b'', b'dots', None, _(b'use dots for runs')),
587 587 (b's', b'spaces', None, _(b'separate elements by spaces')),
588 588 ],
589 589 _(b'[OPTION]... [FILE [REV]...]'),
590 590 optionalrepo=True,
591 591 )
592 592 def debugdag(ui, repo, file_=None, *revs, **opts):
593 593 """format the changelog or an index DAG as a concise textual description
594 594
595 595 If you pass a revlog index, the revlog's DAG is emitted. If you list
596 596 revision numbers, they get labeled in the output as rN.
597 597
598 598 Otherwise, the changelog DAG of the current repo is emitted.
599 599 """
600 600 spaces = opts.get('spaces')
601 601 dots = opts.get('dots')
602 602 if file_:
603 603 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
604 604 revs = {int(r) for r in revs}
605 605
606 606 def events():
607 607 for r in rlog:
608 608 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
609 609 if r in revs:
610 610 yield b'l', (r, b"r%i" % r)
611 611
612 612 elif repo:
613 613 cl = repo.changelog
614 614 tags = opts.get('tags')
615 615 branches = opts.get('branches')
616 616 if tags:
617 617 labels = {}
618 618 for l, n in repo.tags().items():
619 619 labels.setdefault(cl.rev(n), []).append(l)
620 620
621 621 def events():
622 622 b = b"default"
623 623 for r in cl:
624 624 if branches:
625 625 newb = cl.read(cl.node(r))[5][b'branch']
626 626 if newb != b:
627 627 yield b'a', newb
628 628 b = newb
629 629 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
630 630 if tags:
631 631 ls = labels.get(r)
632 632 if ls:
633 633 for l in ls:
634 634 yield b'l', (r, l)
635 635
636 636 else:
637 637 raise error.Abort(_(b'need repo for changelog dag'))
638 638
639 639 for line in dagparser.dagtextlines(
640 640 events(),
641 641 addspaces=spaces,
642 642 wraplabels=True,
643 643 wrapannotations=True,
644 644 wrapnonlinear=dots,
645 645 usedots=dots,
646 646 maxlinewidth=70,
647 647 ):
648 648 ui.write(line)
649 649 ui.write(b"\n")
650 650
651 651
652 652 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
653 653 def debugdata(ui, repo, file_, rev=None, **opts):
654 654 """dump the contents of a data file revision"""
655 655 opts = pycompat.byteskwargs(opts)
656 656 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
657 657 if rev is not None:
658 658 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
659 659 file_, rev = None, file_
660 660 elif rev is None:
661 661 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
662 662 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
663 663 try:
664 664 ui.write(r.rawdata(r.lookup(rev)))
665 665 except KeyError:
666 666 raise error.Abort(_(b'invalid revision identifier %s') % rev)
667 667
668 668
669 669 @command(
670 670 b'debugdate',
671 671 [(b'e', b'extended', None, _(b'try extended date formats'))],
672 672 _(b'[-e] DATE [RANGE]'),
673 673 norepo=True,
674 674 optionalrepo=True,
675 675 )
676 676 def debugdate(ui, date, range=None, **opts):
677 677 """parse and display a date"""
678 678 if opts["extended"]:
679 679 d = dateutil.parsedate(date, dateutil.extendeddateformats)
680 680 else:
681 681 d = dateutil.parsedate(date)
682 682 ui.writenoi18n(b"internal: %d %d\n" % d)
683 683 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
684 684 if range:
685 685 m = dateutil.matchdate(range)
686 686 ui.writenoi18n(b"match: %s\n" % m(d[0]))
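# Illustrative sketch (assumed usage): print the internal and standard forms
# of a date, optionally trying the extended formats and matching a range:
#
#     hg debugdate '2006-02-01 13:00:30'
#     hg debugdate -e 2006 '>2005-12-31'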
687 687
688 688
689 689 @command(
690 690 b'debugdeltachain',
691 691 cmdutil.debugrevlogopts + cmdutil.formatteropts,
692 692 _(b'-c|-m|FILE'),
693 693 optionalrepo=True,
694 694 )
695 695 def debugdeltachain(ui, repo, file_=None, **opts):
696 696 """dump information about delta chains in a revlog
697 697
698 698 Output can be templatized. Available template keywords are:
699 699
700 700 :``rev``: revision number
701 701 :``chainid``: delta chain identifier (numbered by unique base)
702 702 :``chainlen``: delta chain length to this revision
703 703 :``prevrev``: previous revision in delta chain
704 704 :``deltatype``: role of delta / how it was computed
705 705 :``compsize``: compressed size of revision
706 706 :``uncompsize``: uncompressed size of revision
707 707 :``chainsize``: total size of compressed revisions in chain
708 708 :``chainratio``: total chain size divided by uncompressed revision size
709 709 (new delta chains typically start at ratio 2.00)
710 710 :``lindist``: linear distance from base revision in delta chain to end
711 711 of this revision
712 712 :``extradist``: total size of revisions not part of this delta chain from
713 713 base of delta chain to end of this revision; a measurement
714 714 of how much extra data we need to read/seek across to read
715 715 the delta chain for this revision
716 716 :``extraratio``: extradist divided by chainsize; another representation of
717 717 how much unrelated data is needed to load this delta chain
718 718
719 719 If the repository is configured to use sparse reads, additional keywords
720 720 are available:
721 721
722 722 :``readsize``: total size of data read from the disk for a revision
723 723 (sum of the sizes of all the blocks)
724 724 :``largestblock``: size of the largest block of data read from the disk
725 725 :``readdensity``: density of useful bytes in the data read from the disk
726 726 :``srchunks``: in how many data hunks the whole revision would be read
727 727
728 728 The sparse read can be enabled with experimental.sparse-read = True
729 729 """
730 730 opts = pycompat.byteskwargs(opts)
731 731 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
732 732 index = r.index
733 733 start = r.start
734 734 length = r.length
735 735 generaldelta = r.version & revlog.FLAG_GENERALDELTA
736 736 withsparseread = getattr(r, '_withsparseread', False)
737 737
738 738 def revinfo(rev):
739 739 e = index[rev]
740 740 compsize = e[1]
741 741 uncompsize = e[2]
742 742 chainsize = 0
743 743
744 744 if generaldelta:
745 745 if e[3] == e[5]:
746 746 deltatype = b'p1'
747 747 elif e[3] == e[6]:
748 748 deltatype = b'p2'
749 749 elif e[3] == rev - 1:
750 750 deltatype = b'prev'
751 751 elif e[3] == rev:
752 752 deltatype = b'base'
753 753 else:
754 754 deltatype = b'other'
755 755 else:
756 756 if e[3] == rev:
757 757 deltatype = b'base'
758 758 else:
759 759 deltatype = b'prev'
760 760
761 761 chain = r._deltachain(rev)[0]
762 762 for iterrev in chain:
763 763 e = index[iterrev]
764 764 chainsize += e[1]
765 765
766 766 return compsize, uncompsize, deltatype, chain, chainsize
767 767
768 768 fm = ui.formatter(b'debugdeltachain', opts)
769 769
770 770 fm.plain(
771 771 b' rev chain# chainlen prev delta '
772 772 b'size rawsize chainsize ratio lindist extradist '
773 773 b'extraratio'
774 774 )
775 775 if withsparseread:
776 776 fm.plain(b' readsize largestblk rddensity srchunks')
777 777 fm.plain(b'\n')
778 778
779 779 chainbases = {}
780 780 for rev in r:
781 781 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
782 782 chainbase = chain[0]
783 783 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
784 784 basestart = start(chainbase)
785 785 revstart = start(rev)
786 786 lineardist = revstart + comp - basestart
787 787 extradist = lineardist - chainsize
788 788 try:
789 789 prevrev = chain[-2]
790 790 except IndexError:
791 791 prevrev = -1
792 792
793 793 if uncomp != 0:
794 794 chainratio = float(chainsize) / float(uncomp)
795 795 else:
796 796 chainratio = chainsize
797 797
798 798 if chainsize != 0:
799 799 extraratio = float(extradist) / float(chainsize)
800 800 else:
801 801 extraratio = extradist
802 802
803 803 fm.startitem()
804 804 fm.write(
805 805 b'rev chainid chainlen prevrev deltatype compsize '
806 806 b'uncompsize chainsize chainratio lindist extradist '
807 807 b'extraratio',
808 808 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
809 809 rev,
810 810 chainid,
811 811 len(chain),
812 812 prevrev,
813 813 deltatype,
814 814 comp,
815 815 uncomp,
816 816 chainsize,
817 817 chainratio,
818 818 lineardist,
819 819 extradist,
820 820 extraratio,
821 821 rev=rev,
822 822 chainid=chainid,
823 823 chainlen=len(chain),
824 824 prevrev=prevrev,
825 825 deltatype=deltatype,
826 826 compsize=comp,
827 827 uncompsize=uncomp,
828 828 chainsize=chainsize,
829 829 chainratio=chainratio,
830 830 lindist=lineardist,
831 831 extradist=extradist,
832 832 extraratio=extraratio,
833 833 )
834 834 if withsparseread:
835 835 readsize = 0
836 836 largestblock = 0
837 837 srchunks = 0
838 838
839 839 for revschunk in deltautil.slicechunk(r, chain):
840 840 srchunks += 1
841 841 blkend = start(revschunk[-1]) + length(revschunk[-1])
842 842 blksize = blkend - start(revschunk[0])
843 843
844 844 readsize += blksize
845 845 if largestblock < blksize:
846 846 largestblock = blksize
847 847
848 848 if readsize:
849 849 readdensity = float(chainsize) / float(readsize)
850 850 else:
851 851 readdensity = 1
852 852
853 853 fm.write(
854 854 b'readsize largestblock readdensity srchunks',
855 855 b' %10d %10d %9.5f %8d',
856 856 readsize,
857 857 largestblock,
858 858 readdensity,
859 859 srchunks,
860 860 readsize=readsize,
861 861 largestblock=largestblock,
862 862 readdensity=readdensity,
863 863 srchunks=srchunks,
864 864 )
865 865
866 866 fm.plain(b'\n')
867 867
868 868 fm.end()
869 869
870 870
871 871 @command(
872 872 b'debugdirstate|debugstate',
873 873 [
874 874 (
875 875 b'',
876 876 b'nodates',
877 877 None,
878 878 _(b'do not display the saved mtime (DEPRECATED)'),
879 879 ),
880 880 (b'', b'dates', True, _(b'display the saved mtime')),
881 881 (b'', b'datesort', None, _(b'sort by saved mtime')),
882 882 ],
883 883 _(b'[OPTION]...'),
884 884 )
885 885 def debugstate(ui, repo, **opts):
886 886 """show the contents of the current dirstate"""
887 887
888 888 nodates = not opts['dates']
889 889 if opts.get('nodates') is not None:
890 890 nodates = True
891 891 datesort = opts.get('datesort')
892 892
893 893 if datesort:
894 894 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
895 895 else:
896 896 keyfunc = None # sort by filename
897 897 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
898 898 if ent[3] == -1:
899 899 timestr = b'unset '
900 900 elif nodates:
901 901 timestr = b'set '
902 902 else:
903 903 timestr = time.strftime(
904 904 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
905 905 )
906 906 timestr = encoding.strtolocal(timestr)
907 907 if ent[1] & 0o20000:
908 908 mode = b'lnk'
909 909 else:
910 910 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
911 911 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
912 912 for f in repo.dirstate.copies():
913 913 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
914 914
915 915
916 916 @command(
917 917 b'debugdiscovery',
918 918 [
919 919 (b'', b'old', None, _(b'use old-style discovery')),
920 920 (
921 921 b'',
922 922 b'nonheads',
923 923 None,
924 924 _(b'use old-style discovery with non-heads included'),
925 925 ),
926 926 (b'', b'rev', [], b'restrict discovery to this set of revs'),
927 927 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
928 928 ]
929 929 + cmdutil.remoteopts,
930 930 _(b'[--rev REV] [OTHER]'),
931 931 )
932 932 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
933 933 """runs the changeset discovery protocol in isolation"""
934 934 opts = pycompat.byteskwargs(opts)
935 935 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
936 936 remote = hg.peer(repo, opts, remoteurl)
937 937 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
938 938
939 939 # make sure tests are repeatable
940 940 random.seed(int(opts[b'seed']))
941 941
942 942 if opts.get(b'old'):
943 943
944 944 def doit(pushedrevs, remoteheads, remote=remote):
945 945 if not util.safehasattr(remote, b'branches'):
946 946 # enable in-client legacy support
947 947 remote = localrepo.locallegacypeer(remote.local())
948 948 common, _in, hds = treediscovery.findcommonincoming(
949 949 repo, remote, force=True
950 950 )
951 951 common = set(common)
952 952 if not opts.get(b'nonheads'):
953 953 ui.writenoi18n(
954 954 b"unpruned common: %s\n"
955 955 % b" ".join(sorted(short(n) for n in common))
956 956 )
957 957
958 958 clnode = repo.changelog.node
959 959 common = repo.revs(b'heads(::%ln)', common)
960 960 common = {clnode(r) for r in common}
961 961 return common, hds
962 962
963 963 else:
964 964
965 965 def doit(pushedrevs, remoteheads, remote=remote):
966 966 nodes = None
967 967 if pushedrevs:
968 968 revs = scmutil.revrange(repo, pushedrevs)
969 969 nodes = [repo[r].node() for r in revs]
970 970 common, any, hds = setdiscovery.findcommonheads(
971 971 ui, repo, remote, ancestorsof=nodes
972 972 )
973 973 return common, hds
974 974
975 975 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
976 976 localrevs = opts[b'rev']
977 977 with util.timedcm('debug-discovery') as t:
978 978 common, hds = doit(localrevs, remoterevs)
979 979
980 980 # compute all statistics
981 981 common = set(common)
982 982 rheads = set(hds)
983 983 lheads = set(repo.heads())
984 984
985 985 data = {}
986 986 data[b'elapsed'] = t.elapsed
987 987 data[b'nb-common'] = len(common)
988 988 data[b'nb-common-local'] = len(common & lheads)
989 989 data[b'nb-common-remote'] = len(common & rheads)
990 990 data[b'nb-common-both'] = len(common & rheads & lheads)
991 991 data[b'nb-local'] = len(lheads)
992 992 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
993 993 data[b'nb-remote'] = len(rheads)
994 994 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
995 995 data[b'nb-revs'] = len(repo.revs(b'all()'))
996 996 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
997 997 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
998 998
999 999 # display discovery summary
1000 1000 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1001 1001 ui.writenoi18n(b"heads summary:\n")
1002 1002 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
1003 1003 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
1004 1004 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
1005 1005 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
1006 1006 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
1007 1007 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
1008 1008 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
1009 1009 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
1010 1010 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
1011 1011 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
1012 1012 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1013 1013 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1014 1014 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1015 1015
1016 1016 if ui.verbose:
1017 1017 ui.writenoi18n(
1018 1018 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1019 1019 )
1020 1020
1021 1021
1022 1022 _chunksize = 4 << 10
1023 1023
1024 1024
1025 1025 @command(
1026 1026 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1027 1027 )
1028 1028 def debugdownload(ui, repo, url, output=None, **opts):
1029 1029 """download a resource using Mercurial logic and config
1030 1030 """
1031 1031 fh = urlmod.open(ui, url, output)
1032 1032
1033 1033 dest = ui
1034 1034 if output:
1035 1035 dest = open(output, b"wb", _chunksize)
1036 1036 try:
1037 1037 data = fh.read(_chunksize)
1038 1038 while data:
1039 1039 dest.write(data)
1040 1040 data = fh.read(_chunksize)
1041 1041 finally:
1042 1042 if output:
1043 1043 dest.close()
1044 1044
1045 1045
1046 1046 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1047 1047 def debugextensions(ui, repo, **opts):
1048 1048 '''show information about active extensions'''
1049 1049 opts = pycompat.byteskwargs(opts)
1050 1050 exts = extensions.extensions(ui)
1051 1051 hgver = util.version()
1052 1052 fm = ui.formatter(b'debugextensions', opts)
1053 1053 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1054 1054 isinternal = extensions.ismoduleinternal(extmod)
1055 1055 extsource = None
1056 1056
1057 1057 if util.safehasattr(extmod, '__file__'):
1058 1058 extsource = pycompat.fsencode(extmod.__file__)
1059 1059 elif getattr(sys, 'oxidized', False):
1060 1060 extsource = pycompat.sysexecutable
1061 1061 if isinternal:
1062 1062 exttestedwith = [] # never expose magic string to users
1063 1063 else:
1064 1064 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1065 1065 extbuglink = getattr(extmod, 'buglink', None)
1066 1066
1067 1067 fm.startitem()
1068 1068
1069 1069 if ui.quiet or ui.verbose:
1070 1070 fm.write(b'name', b'%s\n', extname)
1071 1071 else:
1072 1072 fm.write(b'name', b'%s', extname)
1073 1073 if isinternal or hgver in exttestedwith:
1074 1074 fm.plain(b'\n')
1075 1075 elif not exttestedwith:
1076 1076 fm.plain(_(b' (untested!)\n'))
1077 1077 else:
1078 1078 lasttestedversion = exttestedwith[-1]
1079 1079 fm.plain(b' (%s!)\n' % lasttestedversion)
1080 1080
1081 1081 fm.condwrite(
1082 1082 ui.verbose and extsource,
1083 1083 b'source',
1084 1084 _(b' location: %s\n'),
1085 1085 extsource or b"",
1086 1086 )
1087 1087
1088 1088 if ui.verbose:
1089 1089 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1090 1090 fm.data(bundled=isinternal)
1091 1091
1092 1092 fm.condwrite(
1093 1093 ui.verbose and exttestedwith,
1094 1094 b'testedwith',
1095 1095 _(b' tested with: %s\n'),
1096 1096 fm.formatlist(exttestedwith, name=b'ver'),
1097 1097 )
1098 1098
1099 1099 fm.condwrite(
1100 1100 ui.verbose and extbuglink,
1101 1101 b'buglink',
1102 1102 _(b' bug reporting: %s\n'),
1103 1103 extbuglink or b"",
1104 1104 )
1105 1105
1106 1106 fm.end()
1107 1107
1108 1108
1109 1109 @command(
1110 1110 b'debugfileset',
1111 1111 [
1112 1112 (
1113 1113 b'r',
1114 1114 b'rev',
1115 1115 b'',
1116 1116 _(b'apply the filespec on this revision'),
1117 1117 _(b'REV'),
1118 1118 ),
1119 1119 (
1120 1120 b'',
1121 1121 b'all-files',
1122 1122 False,
1123 1123 _(b'test files from all revisions and working directory'),
1124 1124 ),
1125 1125 (
1126 1126 b's',
1127 1127 b'show-matcher',
1128 1128 None,
1129 1129 _(b'print internal representation of matcher'),
1130 1130 ),
1131 1131 (
1132 1132 b'p',
1133 1133 b'show-stage',
1134 1134 [],
1135 1135 _(b'print parsed tree at the given stage'),
1136 1136 _(b'NAME'),
1137 1137 ),
1138 1138 ],
1139 1139 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1140 1140 )
1141 1141 def debugfileset(ui, repo, expr, **opts):
1142 1142 '''parse and apply a fileset specification'''
1143 1143 from . import fileset
1144 1144
1145 1145 fileset.symbols # force import of fileset so we have predicates to optimize
1146 1146 opts = pycompat.byteskwargs(opts)
1147 1147 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1148 1148
1149 1149 stages = [
1150 1150 (b'parsed', pycompat.identity),
1151 1151 (b'analyzed', filesetlang.analyze),
1152 1152 (b'optimized', filesetlang.optimize),
1153 1153 ]
1154 1154 stagenames = {n for n, f in stages}
1155 1155
1156 1156 showalways = set()
1157 1157 if ui.verbose and not opts[b'show_stage']:
1158 1158 # show parsed tree by --verbose (deprecated)
1159 1159 showalways.add(b'parsed')
1160 1160 if opts[b'show_stage'] == [b'all']:
1161 1161 showalways.update(stagenames)
1162 1162 else:
1163 1163 for n in opts[b'show_stage']:
1164 1164 if n not in stagenames:
1165 1165 raise error.Abort(_(b'invalid stage name: %s') % n)
1166 1166 showalways.update(opts[b'show_stage'])
1167 1167
1168 1168 tree = filesetlang.parse(expr)
1169 1169 for n, f in stages:
1170 1170 tree = f(tree)
1171 1171 if n in showalways:
1172 1172 if opts[b'show_stage'] or n != b'parsed':
1173 1173 ui.write(b"* %s:\n" % n)
1174 1174 ui.write(filesetlang.prettyformat(tree), b"\n")
1175 1175
1176 1176 files = set()
1177 1177 if opts[b'all_files']:
1178 1178 for r in repo:
1179 1179 c = repo[r]
1180 1180 files.update(c.files())
1181 1181 files.update(c.substate)
1182 1182 if opts[b'all_files'] or ctx.rev() is None:
1183 1183 wctx = repo[None]
1184 1184 files.update(
1185 1185 repo.dirstate.walk(
1186 1186 scmutil.matchall(repo),
1187 1187 subrepos=list(wctx.substate),
1188 1188 unknown=True,
1189 1189 ignored=True,
1190 1190 )
1191 1191 )
1192 1192 files.update(wctx.substate)
1193 1193 else:
1194 1194 files.update(ctx.files())
1195 1195 files.update(ctx.substate)
1196 1196
1197 1197 m = ctx.matchfileset(repo.getcwd(), expr)
1198 1198 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1199 1199 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1200 1200 for f in sorted(files):
1201 1201 if not m(f):
1202 1202 continue
1203 1203 ui.write(b"%s\n" % f)
1204 1204
1205 1205
1206 1206 @command(b'debugformat', [] + cmdutil.formatteropts)
1207 1207 def debugformat(ui, repo, **opts):
1208 1208 """display format information about the current repository
1209 1209
1210 1210 Use --verbose to get extra information about current config value and
1211 1211 Mercurial default."""
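# Illustrative sketch (assumed usage): compare the repository's format
# variants against the configured and default values,
#
#     hg debugformat --verbose
#
# or emit the same information through the formatter, e.g. as JSON:
#
#     hg debugformat -T json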
1212 1212 opts = pycompat.byteskwargs(opts)
1213 1213 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1214 1214 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1215 1215
1216 1216 def makeformatname(name):
1217 1217 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1218 1218
1219 1219 fm = ui.formatter(b'debugformat', opts)
1220 1220 if fm.isplain():
1221 1221
1222 1222 def formatvalue(value):
1223 1223 if util.safehasattr(value, b'startswith'):
1224 1224 return value
1225 1225 if value:
1226 1226 return b'yes'
1227 1227 else:
1228 1228 return b'no'
1229 1229
1230 1230 else:
1231 1231 formatvalue = pycompat.identity
1232 1232
1233 1233 fm.plain(b'format-variant')
1234 1234 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1235 1235 fm.plain(b' repo')
1236 1236 if ui.verbose:
1237 1237 fm.plain(b' config default')
1238 1238 fm.plain(b'\n')
1239 1239 for fv in upgrade.allformatvariant:
1240 1240 fm.startitem()
1241 1241 repovalue = fv.fromrepo(repo)
1242 1242 configvalue = fv.fromconfig(repo)
1243 1243
1244 1244 if repovalue != configvalue:
1245 1245 namelabel = b'formatvariant.name.mismatchconfig'
1246 1246 repolabel = b'formatvariant.repo.mismatchconfig'
1247 1247 elif repovalue != fv.default:
1248 1248 namelabel = b'formatvariant.name.mismatchdefault'
1249 1249 repolabel = b'formatvariant.repo.mismatchdefault'
1250 1250 else:
1251 1251 namelabel = b'formatvariant.name.uptodate'
1252 1252 repolabel = b'formatvariant.repo.uptodate'
1253 1253
1254 1254 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1255 1255 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1256 1256 if fv.default != configvalue:
1257 1257 configlabel = b'formatvariant.config.special'
1258 1258 else:
1259 1259 configlabel = b'formatvariant.config.default'
1260 1260 fm.condwrite(
1261 1261 ui.verbose,
1262 1262 b'config',
1263 1263 b' %6s',
1264 1264 formatvalue(configvalue),
1265 1265 label=configlabel,
1266 1266 )
1267 1267 fm.condwrite(
1268 1268 ui.verbose,
1269 1269 b'default',
1270 1270 b' %7s',
1271 1271 formatvalue(fv.default),
1272 1272 label=b'formatvariant.default',
1273 1273 )
1274 1274 fm.plain(b'\n')
1275 1275 fm.end()
1276 1276
1277 1277
1278 1278 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1279 1279 def debugfsinfo(ui, path=b"."):
1280 1280 """show information detected about current filesystem"""
1281 1281 ui.writenoi18n(b'path: %s\n' % path)
1282 1282 ui.writenoi18n(
1283 1283 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1284 1284 )
1285 1285 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1286 1286 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1287 1287 ui.writenoi18n(
1288 1288 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1289 1289 )
1290 1290 ui.writenoi18n(
1291 1291 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1292 1292 )
1293 1293 casesensitive = b'(unknown)'
1294 1294 try:
1295 1295 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1296 1296 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1297 1297 except OSError:
1298 1298 pass
1299 1299 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1300 1300
1301 1301
1302 1302 @command(
1303 1303 b'debuggetbundle',
1304 1304 [
1305 1305 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1306 1306 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1307 1307 (
1308 1308 b't',
1309 1309 b'type',
1310 1310 b'bzip2',
1311 1311 _(b'bundle compression type to use'),
1312 1312 _(b'TYPE'),
1313 1313 ),
1314 1314 ],
1315 1315 _(b'REPO FILE [-H|-C ID]...'),
1316 1316 norepo=True,
1317 1317 )
1318 1318 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1319 1319 """retrieves a bundle from a repo
1320 1320
1321 1321 Every ID must be a full-length hex node id string. Saves the bundle to the
1322 1322 given file.
1323 1323 """
1324 1324 opts = pycompat.byteskwargs(opts)
1325 1325 repo = hg.peer(ui, opts, repopath)
1326 1326 if not repo.capable(b'getbundle'):
1327 1327 raise error.Abort(b"getbundle() not supported by target repository")
1328 1328 args = {}
1329 1329 if common:
1330 1330 args['common'] = [bin(s) for s in common]
1331 1331 if head:
1332 1332 args['heads'] = [bin(s) for s in head]
1333 1333 # TODO: get desired bundlecaps from command line.
1334 1334 args['bundlecaps'] = None
1335 1335 bundle = repo.getbundle(b'debug', **args)
1336 1336
1337 1337 bundletype = opts.get(b'type', b'bzip2').lower()
1338 1338 btypes = {
1339 1339 b'none': b'HG10UN',
1340 1340 b'bzip2': b'HG10BZ',
1341 1341 b'gzip': b'HG10GZ',
1342 1342 b'bundle2': b'HG20',
1343 1343 }
1344 1344 bundletype = btypes.get(bundletype)
1345 1345 if bundletype not in bundle2.bundletypes:
1346 1346 raise error.Abort(_(b'unknown bundle type specified with --type'))
1347 1347 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1348 1348
1349 1349
1350 1350 @command(b'debugignore', [], b'[FILE]')
1351 1351 def debugignore(ui, repo, *files, **opts):
1352 1352 """display the combined ignore pattern and information about ignored files
1353 1353
1354 1354 With no argument display the combined ignore pattern.
1355 1355
1356 1356 Given space separated file names, shows if the given file is ignored and
1357 1357 if so, show the ignore rule (file and line number) that matched it.
1358 1358 """
1359 1359 ignore = repo.dirstate._ignore
1360 1360 if not files:
1361 1361 # Show all the patterns
1362 1362 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1363 1363 else:
1364 1364 m = scmutil.match(repo[None], pats=files)
1365 1365 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1366 1366 for f in m.files():
1367 1367 nf = util.normpath(f)
1368 1368 ignored = None
1369 1369 ignoredata = None
1370 1370 if nf != b'.':
1371 1371 if ignore(nf):
1372 1372 ignored = nf
1373 1373 ignoredata = repo.dirstate._ignorefileandline(nf)
1374 1374 else:
1375 1375 for p in pathutil.finddirs(nf):
1376 1376 if ignore(p):
1377 1377 ignored = p
1378 1378 ignoredata = repo.dirstate._ignorefileandline(p)
1379 1379 break
1380 1380 if ignored:
1381 1381 if ignored == nf:
1382 1382 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1383 1383 else:
1384 1384 ui.write(
1385 1385 _(
1386 1386 b"%s is ignored because of "
1387 1387 b"containing directory %s\n"
1388 1388 )
1389 1389 % (uipathfn(f), ignored)
1390 1390 )
1391 1391 ignorefile, lineno, line = ignoredata
1392 1392 ui.write(
1393 1393 _(b"(ignore rule in %s, line %d: '%s')\n")
1394 1394 % (ignorefile, lineno, line)
1395 1395 )
1396 1396 else:
1397 1397 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1398 1398
1399 1399
1400 1400 @command(
1401 1401 b'debugindex',
1402 1402 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1403 1403 _(b'-c|-m|FILE'),
1404 1404 )
1405 1405 def debugindex(ui, repo, file_=None, **opts):
1406 1406 """dump index data for a storage primitive"""
1407 1407 opts = pycompat.byteskwargs(opts)
1408 1408 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1409 1409
1410 1410 if ui.debugflag:
1411 1411 shortfn = hex
1412 1412 else:
1413 1413 shortfn = short
1414 1414
1415 1415 idlen = 12
1416 1416 for i in store:
1417 1417 idlen = len(shortfn(store.node(i)))
1418 1418 break
1419 1419
1420 1420 fm = ui.formatter(b'debugindex', opts)
1421 1421 fm.plain(
1422 1422 b' rev linkrev %s %s p2\n'
1423 1423 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1424 1424 )
1425 1425
1426 1426 for rev in store:
1427 1427 node = store.node(rev)
1428 1428 parents = store.parents(node)
1429 1429
1430 1430 fm.startitem()
1431 1431 fm.write(b'rev', b'%6d ', rev)
1432 1432 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1433 1433 fm.write(b'node', b'%s ', shortfn(node))
1434 1434 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1435 1435 fm.write(b'p2', b'%s', shortfn(parents[1]))
1436 1436 fm.plain(b'\n')
1437 1437
1438 1438 fm.end()
1439 1439
1440 1440
1441 1441 @command(
1442 1442 b'debugindexdot',
1443 1443 cmdutil.debugrevlogopts,
1444 1444 _(b'-c|-m|FILE'),
1445 1445 optionalrepo=True,
1446 1446 )
1447 1447 def debugindexdot(ui, repo, file_=None, **opts):
1448 1448 """dump an index DAG as a graphviz dot file"""
1449 1449 opts = pycompat.byteskwargs(opts)
1450 1450 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1451 1451 ui.writenoi18n(b"digraph G {\n")
1452 1452 for i in r:
1453 1453 node = r.node(i)
1454 1454 pp = r.parents(node)
1455 1455 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1456 1456 if pp[1] != nullid:
1457 1457 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1458 1458 ui.write(b"}\n")
1459 1459
1460 1460
1461 1461 @command(b'debugindexstats', [])
1462 1462 def debugindexstats(ui, repo):
1463 1463 """show stats related to the changelog index"""
1464 1464 repo.changelog.shortest(nullid, 1)
1465 1465 index = repo.changelog.index
1466 1466 if not util.safehasattr(index, b'stats'):
1467 1467 raise error.Abort(_(b'debugindexstats only works with native code'))
1468 1468 for k, v in sorted(index.stats().items()):
1469 1469 ui.write(b'%s: %d\n' % (k, v))
1470 1470
1471 1471
1472 1472 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1473 1473 def debuginstall(ui, **opts):
1474 1474 '''test Mercurial installation
1475 1475
1476 1476 Returns 0 on success.
1477 1477 '''
1478 1478 opts = pycompat.byteskwargs(opts)
1479 1479
1480 1480 problems = 0
1481 1481
1482 1482 fm = ui.formatter(b'debuginstall', opts)
1483 1483 fm.startitem()
1484 1484
1485 1485 # encoding might be unknown or wrong. don't translate these messages.
1486 1486 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1487 1487 err = None
1488 1488 try:
1489 1489 codecs.lookup(pycompat.sysstr(encoding.encoding))
1490 1490 except LookupError as inst:
1491 1491 err = stringutil.forcebytestr(inst)
1492 1492 problems += 1
1493 1493 fm.condwrite(
1494 1494 err,
1495 1495 b'encodingerror',
1496 1496 b" %s\n (check that your locale is properly set)\n",
1497 1497 err,
1498 1498 )
1499 1499
1500 1500 # Python
1501 1501 pythonlib = None
1502 1502 if util.safehasattr(os, '__file__'):
1503 1503 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1504 1504 elif getattr(sys, 'oxidized', False):
1505 1505 pythonlib = pycompat.sysexecutable
1506 1506
1507 1507 fm.write(
1508 1508 b'pythonexe',
1509 1509 _(b"checking Python executable (%s)\n"),
1510 1510 pycompat.sysexecutable or _(b"unknown"),
1511 1511 )
1512 1512 fm.write(
1513 1513 b'pythonimplementation',
1514 1514 _(b"checking Python implementation (%s)\n"),
1515 1515 pycompat.sysbytes(platform.python_implementation()),
1516 1516 )
1517 1517 fm.write(
1518 1518 b'pythonver',
1519 1519 _(b"checking Python version (%s)\n"),
1520 1520 (b"%d.%d.%d" % sys.version_info[:3]),
1521 1521 )
1522 1522 fm.write(
1523 1523 b'pythonlib',
1524 1524 _(b"checking Python lib (%s)...\n"),
1525 1525 pythonlib or _(b"unknown"),
1526 1526 )
1527 1527
1528 1528 try:
1529 1529 from . import rustext
1530 1530
1531 1531 rustext.__doc__ # trigger lazy import
1532 1532 except ImportError:
1533 1533 rustext = None
1534 1534
1535 1535 security = set(sslutil.supportedprotocols)
1536 1536 if sslutil.hassni:
1537 1537 security.add(b'sni')
1538 1538
1539 1539 fm.write(
1540 1540 b'pythonsecurity',
1541 1541 _(b"checking Python security support (%s)\n"),
1542 1542 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1543 1543 )
1544 1544
1545 1545 # These are warnings, not errors. So don't increment problem count. This
1546 1546 # may change in the future.
1547 1547 if b'tls1.2' not in security:
1548 1548 fm.plain(
1549 1549 _(
1550 1550 b' TLS 1.2 not supported by Python install; '
1551 1551 b'network connections lack modern security\n'
1552 1552 )
1553 1553 )
1554 1554 if b'sni' not in security:
1555 1555 fm.plain(
1556 1556 _(
1557 1557 b' SNI not supported by Python install; may have '
1558 1558 b'connectivity issues with some servers\n'
1559 1559 )
1560 1560 )
1561 1561
1562 1562 fm.plain(
1563 1563 _(
1564 1564 b"checking Rust extensions (%s)\n"
1565 1565 % (b'missing' if rustext is None else b'installed')
1566 1566 ),
1567 1567 )
1568 1568
1569 1569 # TODO print CA cert info
1570 1570
1571 1571 # hg version
1572 1572 hgver = util.version()
1573 1573 fm.write(
1574 1574 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1575 1575 )
1576 1576 fm.write(
1577 1577 b'hgverextra',
1578 1578 _(b"checking Mercurial custom build (%s)\n"),
1579 1579 b'+'.join(hgver.split(b'+')[1:]),
1580 1580 )
1581 1581
1582 1582 # compiled modules
1583 1583 hgmodules = None
1584 1584 if util.safehasattr(sys.modules[__name__], '__file__'):
1585 1585 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1586 1586 elif getattr(sys, 'oxidized', False):
1587 1587 hgmodules = pycompat.sysexecutable
1588 1588
1589 1589 fm.write(
1590 1590 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1591 1591 )
1592 1592 fm.write(
1593 1593 b'hgmodules',
1594 1594 _(b"checking installed modules (%s)...\n"),
1595 1595 hgmodules or _(b"unknown"),
1596 1596 )
1597 1597
1598 1598 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1599 1599 rustext = rustandc # for now, that's the only case
1600 1600 cext = policy.policy in (b'c', b'allow') or rustandc
1601 1601 nopure = cext or rustext
1602 1602 if nopure:
1603 1603 err = None
1604 1604 try:
1605 1605 if cext:
1606 1606 from .cext import ( # pytype: disable=import-error
1607 1607 base85,
1608 1608 bdiff,
1609 1609 mpatch,
1610 1610 osutil,
1611 1611 )
1612 1612
1613 1613 # quiet pyflakes
1614 1614 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1615 1615 if rustext:
1616 1616 from .rustext import ( # pytype: disable=import-error
1617 1617 ancestor,
1618 1618 dirstate,
1619 1619 )
1620 1620
1621 1621 dir(ancestor), dir(dirstate) # quiet pyflakes
1622 1622 except Exception as inst:
1623 1623 err = stringutil.forcebytestr(inst)
1624 1624 problems += 1
1625 1625 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1626 1626
1627 1627 compengines = util.compengines._engines.values()
1628 1628 fm.write(
1629 1629 b'compengines',
1630 1630 _(b'checking registered compression engines (%s)\n'),
1631 1631 fm.formatlist(
1632 1632 sorted(e.name() for e in compengines),
1633 1633 name=b'compengine',
1634 1634 fmt=b'%s',
1635 1635 sep=b', ',
1636 1636 ),
1637 1637 )
1638 1638 fm.write(
1639 1639 b'compenginesavail',
1640 1640 _(b'checking available compression engines (%s)\n'),
1641 1641 fm.formatlist(
1642 1642 sorted(e.name() for e in compengines if e.available()),
1643 1643 name=b'compengine',
1644 1644 fmt=b'%s',
1645 1645 sep=b', ',
1646 1646 ),
1647 1647 )
1648 1648 wirecompengines = compression.compengines.supportedwireengines(
1649 1649 compression.SERVERROLE
1650 1650 )
1651 1651 fm.write(
1652 1652 b'compenginesserver',
1653 1653 _(
1654 1654 b'checking available compression engines '
1655 1655 b'for wire protocol (%s)\n'
1656 1656 ),
1657 1657 fm.formatlist(
1658 1658 [e.name() for e in wirecompengines if e.wireprotosupport()],
1659 1659 name=b'compengine',
1660 1660 fmt=b'%s',
1661 1661 sep=b', ',
1662 1662 ),
1663 1663 )
1664 1664 re2 = b'missing'
1665 1665 if util._re2:
1666 1666 re2 = b'available'
1667 1667 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1668 1668 fm.data(re2=bool(util._re2))
1669 1669
1670 1670 # templates
1671 1671 p = templater.templatedir()
1672 1672 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1673 1673 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1674 1674 if p:
1675 1675 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1676 1676 if m:
1677 1677 # template found, check if it is working
1678 1678 err = None
1679 1679 try:
1680 1680 templater.templater.frommapfile(m)
1681 1681 except Exception as inst:
1682 1682 err = stringutil.forcebytestr(inst)
1683 1683 p = None
1684 1684 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1685 1685 else:
1686 1686 p = None
1687 1687 fm.condwrite(
1688 1688 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1689 1689 )
1690 1690 fm.condwrite(
1691 1691 not m,
1692 1692 b'defaulttemplatenotfound',
1693 1693 _(b" template '%s' not found\n"),
1694 1694 b"default",
1695 1695 )
1696 1696 if not p:
1697 1697 problems += 1
1698 1698 fm.condwrite(
1699 1699 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1700 1700 )
1701 1701
1702 1702 # editor
1703 1703 editor = ui.geteditor()
1704 1704 editor = util.expandpath(editor)
1705 1705 editorbin = procutil.shellsplit(editor)[0]
1706 1706 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1707 1707 cmdpath = procutil.findexe(editorbin)
1708 1708 fm.condwrite(
1709 1709 not cmdpath and editor == b'vi',
1710 1710 b'vinotfound',
1711 1711 _(
1712 1712 b" No commit editor set and can't find %s in PATH\n"
1713 1713 b" (specify a commit editor in your configuration"
1714 1714 b" file)\n"
1715 1715 ),
1716 1716 not cmdpath and editor == b'vi' and editorbin,
1717 1717 )
1718 1718 fm.condwrite(
1719 1719 not cmdpath and editor != b'vi',
1720 1720 b'editornotfound',
1721 1721 _(
1722 1722 b" Can't find editor '%s' in PATH\n"
1723 1723 b" (specify a commit editor in your configuration"
1724 1724 b" file)\n"
1725 1725 ),
1726 1726 not cmdpath and editorbin,
1727 1727 )
1728 1728 if not cmdpath and editor != b'vi':
1729 1729 problems += 1
1730 1730
1731 1731 # check username
1732 1732 username = None
1733 1733 err = None
1734 1734 try:
1735 1735 username = ui.username()
1736 1736 except error.Abort as e:
1737 1737 err = stringutil.forcebytestr(e)
1738 1738 problems += 1
1739 1739
1740 1740 fm.condwrite(
1741 1741 username, b'username', _(b"checking username (%s)\n"), username
1742 1742 )
1743 1743 fm.condwrite(
1744 1744 err,
1745 1745 b'usernameerror',
1746 1746 _(
1747 1747 b"checking username...\n %s\n"
1748 1748 b" (specify a username in your configuration file)\n"
1749 1749 ),
1750 1750 err,
1751 1751 )
1752 1752
1753 1753 for name, mod in extensions.extensions():
1754 1754 handler = getattr(mod, 'debuginstall', None)
1755 1755 if handler is not None:
1756 1756 problems += handler(ui, fm)
1757 1757
1758 1758 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1759 1759 if not problems:
1760 1760 fm.data(problems=problems)
1761 1761 fm.condwrite(
1762 1762 problems,
1763 1763 b'problems',
1764 1764 _(b"%d problems detected, please check your install!\n"),
1765 1765 problems,
1766 1766 )
1767 1767 fm.end()
1768 1768
1769 1769 return problems
1770 1770
1771 1771
1772 1772 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1773 1773 def debugknown(ui, repopath, *ids, **opts):
1774 1774 """test whether node ids are known to a repo
1775 1775
1776 1776 Every ID must be a full-length hex node id string. Returns a list of 0s
1777 1777 and 1s indicating unknown/known.
1778 1778 """
1779 1779 opts = pycompat.byteskwargs(opts)
1780 1780 repo = hg.peer(ui, opts, repopath)
1781 1781 if not repo.capable(b'known'):
1782 1782 raise error.Abort(b"known() not supported by target repository")
1783 1783 flags = repo.known([bin(s) for s in ids])
1784 1784 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1785 1785
1786 1786
1787 1787 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1788 1788 def debuglabelcomplete(ui, repo, *args):
1789 1789 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1790 1790 debugnamecomplete(ui, repo, *args)
1791 1791
1792 1792
1793 1793 @command(
1794 1794 b'debuglocks',
1795 1795 [
1796 1796 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1797 1797 (
1798 1798 b'W',
1799 1799 b'force-wlock',
1800 1800 None,
1801 1801 _(b'free the working state lock (DANGEROUS)'),
1802 1802 ),
1803 1803 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1804 1804 (
1805 1805 b'S',
1806 1806 b'set-wlock',
1807 1807 None,
1808 1808 _(b'set the working state lock until stopped'),
1809 1809 ),
1810 1810 ],
1811 1811 _(b'[OPTION]...'),
1812 1812 )
1813 1813 def debuglocks(ui, repo, **opts):
1814 1814 """show or modify state of locks
1815 1815
1816 1816 By default, this command will show which locks are held. This
1817 1817 includes the user and process holding the lock, the amount of time
1818 1818 the lock has been held, and the machine name where the process is
1819 1819 running if it's not local.
1820 1820
1821 1821     Locks protect the integrity of Mercurial's data, so they should be
1822 1822     treated with care. System crashes or other interruptions may cause
1823 1823 locks to not be properly released, though Mercurial will usually
1824 1824 detect and remove such stale locks automatically.
1825 1825
1826 1826 However, detecting stale locks may not always be possible (for
1827 1827 instance, on a shared filesystem). Removing locks may also be
1828 1828 blocked by filesystem permissions.
1829 1829
1830 1830 Setting a lock will prevent other commands from changing the data.
1831 1831 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1832 1832 The set locks are removed when the command exits.
1833 1833
1834 1834 Returns 0 if no locks are held.
1835 1835
1836 1836 """
1837 1837
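    # Illustrative session (user, pid and host below are made-up examples):
    #   $ hg debuglocks
    #   lock:  user alice, process 12345, host buildhost (3s)
    #   wlock: free
    # A non-zero exit status means at least one lock is currently held.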
1838 1838 if opts.get('force_lock'):
1839 1839 repo.svfs.unlink(b'lock')
1840 1840 if opts.get('force_wlock'):
1841 1841 repo.vfs.unlink(b'wlock')
1842 1842 if opts.get('force_lock') or opts.get('force_wlock'):
1843 1843 return 0
1844 1844
1845 1845 locks = []
1846 1846 try:
1847 1847 if opts.get('set_wlock'):
1848 1848 try:
1849 1849 locks.append(repo.wlock(False))
1850 1850 except error.LockHeld:
1851 1851 raise error.Abort(_(b'wlock is already held'))
1852 1852 if opts.get('set_lock'):
1853 1853 try:
1854 1854 locks.append(repo.lock(False))
1855 1855 except error.LockHeld:
1856 1856 raise error.Abort(_(b'lock is already held'))
1857 1857 if len(locks):
1858 1858 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1859 1859 return 0
1860 1860 finally:
1861 1861 release(*locks)
1862 1862
1863 1863 now = time.time()
1864 1864 held = 0
1865 1865
1866 1866 def report(vfs, name, method):
1867 1867 # this causes stale locks to get reaped for more accurate reporting
1868 1868 try:
1869 1869 l = method(False)
1870 1870 except error.LockHeld:
1871 1871 l = None
1872 1872
1873 1873 if l:
1874 1874 l.release()
1875 1875 else:
1876 1876 try:
1877 1877 st = vfs.lstat(name)
1878 1878 age = now - st[stat.ST_MTIME]
1879 1879 user = util.username(st.st_uid)
1880 1880 locker = vfs.readlock(name)
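                # the lock file stores its holder as "host:pid"; combine that
                # with the lock file's owner to build a friendlier description,
                # naming the host only when the lock was taken on another machine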
1881 1881 if b":" in locker:
1882 1882 host, pid = locker.split(b':')
1883 1883 if host == socket.gethostname():
1884 1884 locker = b'user %s, process %s' % (user or b'None', pid)
1885 1885 else:
1886 1886 locker = b'user %s, process %s, host %s' % (
1887 1887 user or b'None',
1888 1888 pid,
1889 1889 host,
1890 1890 )
1891 1891 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1892 1892 return 1
1893 1893 except OSError as e:
1894 1894 if e.errno != errno.ENOENT:
1895 1895 raise
1896 1896
1897 1897 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1898 1898 return 0
1899 1899
1900 1900 held += report(repo.svfs, b"lock", repo.lock)
1901 1901 held += report(repo.vfs, b"wlock", repo.wlock)
1902 1902
1903 1903 return held
1904 1904
1905 1905
1906 1906 @command(
1907 1907 b'debugmanifestfulltextcache',
1908 1908 [
1909 1909 (b'', b'clear', False, _(b'clear the cache')),
1910 1910 (
1911 1911 b'a',
1912 1912 b'add',
1913 1913 [],
1914 1914 _(b'add the given manifest nodes to the cache'),
1915 1915 _(b'NODE'),
1916 1916 ),
1917 1917 ],
1918 1918 b'',
1919 1919 )
1920 1920 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1921 1921 """show, clear or amend the contents of the manifest fulltext cache"""
1922 1922
1923 1923 def getcache():
1924 1924 r = repo.manifestlog.getstorage(b'')
1925 1925 try:
1926 1926 return r._fulltextcache
1927 1927 except AttributeError:
1928 1928 msg = _(
1929 1929 b"Current revlog implementation doesn't appear to have a "
1930 1930 b"manifest fulltext cache\n"
1931 1931 )
1932 1932 raise error.Abort(msg)
1933 1933
1934 1934 if opts.get('clear'):
1935 1935 with repo.wlock():
1936 1936 cache = getcache()
1937 1937 cache.clear(clear_persisted_data=True)
1938 1938 return
1939 1939
1940 1940 if add:
1941 1941 with repo.wlock():
1942 1942 m = repo.manifestlog
1943 1943 store = m.getstorage(b'')
1944 1944 for n in add:
1945 1945 try:
1946 1946 manifest = m[store.lookup(n)]
1947 1947 except error.LookupError as e:
1948 1948 raise error.Abort(e, hint=b"Check your manifest node id")
1949 1949                 manifest.read() # stores revision in cache too
1950 1950 return
1951 1951
1952 1952 cache = getcache()
1953 1953 if not len(cache):
1954 1954 ui.write(_(b'cache empty\n'))
1955 1955 else:
1956 1956 ui.write(
1957 1957 _(
1958 1958 b'cache contains %d manifest entries, in order of most to '
1959 1959 b'least recent:\n'
1960 1960 )
1961 1961 % (len(cache),)
1962 1962 )
1963 1963 totalsize = 0
1964 1964 for nodeid in cache:
1965 1965             # Use cache.peek to avoid updating the LRU order
1966 1966 data = cache.peek(nodeid)
1967 1967 size = len(data)
1968 1968 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1969 1969 ui.write(
1970 1970 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1971 1971 )
1972 1972 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1973 1973 ui.write(
1974 1974 _(b'total cache data size %s, on-disk %s\n')
1975 1975 % (util.bytecount(totalsize), util.bytecount(ondisk))
1976 1976 )
1977 1977
1978 1978
1979 1979 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
1980 1980 def debugmergestate(ui, repo, *args, **opts):
1981 1981 """print merge state
1982 1982
1983 1983 Use --verbose to print out information about whether v1 or v2 merge state
1984 1984 was chosen."""
1985 1985
1986 1986 if ui.verbose:
1987 1987 ms = mergestatemod.mergestate(repo)
1988 1988
1989 1989 # sort so that reasonable information is on top
1990 1990 v1records = ms._readrecordsv1()
1991 1991 v2records = ms._readrecordsv2()
1992 1992
1993 1993 if not v1records and not v2records:
1994 1994 pass
1995 1995 elif not v2records:
1996 1996 ui.writenoi18n(b'no version 2 merge state\n')
1997 1997 elif ms._v1v2match(v1records, v2records):
1998 1998 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
1999 1999 else:
2000 2000 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2001 2001
2002 2002 opts = pycompat.byteskwargs(opts)
2003 2003 if not opts[b'template']:
2004 2004 opts[b'template'] = (
2005 2005 b'{if(commits, "", "no merge state found\n")}'
2006 2006 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2007 2007 b'{files % "file: {path} (state \\"{state}\\")\n'
2008 2008 b'{if(local_path, "'
2009 2009 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2010 2010 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2011 2011 b' other path: {other_path} (node {other_node})\n'
2012 2012 b'")}'
2013 2013 b'{if(rename_side, "'
2014 2014 b' rename side: {rename_side}\n'
2015 2015 b' renamed path: {renamed_path}\n'
2016 2016 b'")}'
2017 2017 b'{extras % " extra: {key} = {value}\n"}'
2018 2018 b'"}'
2019 b'{extras % "extra: {file} ({key} = {value})\n"}'
2019 2020 )
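    # With this default template the output looks roughly like the following
    # (paths, nodes, states and extra keys below are illustrative only):
    #   local (working copy): <node>
    #   other (merge rev): <node>
    #   file: foo.txt (state "u")
    #     local path: foo.txt (hash <sha1>, flags "")
    #     ...
    #   extra: bar.txt (ancestorlinknode = <node>)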
2020 2021
2021 2022 ms = mergestatemod.mergestate.read(repo)
2022 2023
2023 2024 fm = ui.formatter(b'debugmergestate', opts)
2024 2025 fm.startitem()
2025 2026
2026 2027 fm_commits = fm.nested(b'commits')
2027 2028 if ms.active():
2028 2029 for name, node, label_index in (
2029 2030 (b'local', ms.local, 0),
2030 2031 (b'other', ms.other, 1),
2031 2032 ):
2032 2033 fm_commits.startitem()
2033 2034 fm_commits.data(name=name)
2034 2035 fm_commits.data(node=hex(node))
2035 2036 if ms._labels and len(ms._labels) > label_index:
2036 2037 fm_commits.data(label=ms._labels[label_index])
2037 2038 fm_commits.end()
2038 2039
2039 2040 fm_files = fm.nested(b'files')
2040 2041 if ms.active():
2041 2042 for f in ms:
2042 2043 fm_files.startitem()
2043 2044 fm_files.data(path=f)
2044 2045 state = ms._state[f]
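            # each _state entry is a list whose first element is the merge
            # record state; for regular merge records the remaining elements
            # are, in order: local key, local path, ancestor path, ancestor
            # node, other path, other node and local flags (unpacked field by
            # field below), while path-conflict records carry the renamed path
            # and the side of the rename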
2045 2046 fm_files.data(state=state[0])
2046 2047 if state[0] in (
2047 2048 mergestatemod.MERGE_RECORD_UNRESOLVED,
2048 2049 mergestatemod.MERGE_RECORD_RESOLVED,
2049 2050 ):
2050 2051 fm_files.data(local_key=state[1])
2051 2052 fm_files.data(local_path=state[2])
2052 2053 fm_files.data(ancestor_path=state[3])
2053 2054 fm_files.data(ancestor_node=state[4])
2054 2055 fm_files.data(other_path=state[5])
2055 2056 fm_files.data(other_node=state[6])
2056 2057 fm_files.data(local_flags=state[7])
2057 2058 elif state[0] in (
2058 2059 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2059 2060 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2060 2061 ):
2061 2062 fm_files.data(renamed_path=state[1])
2062 2063 fm_files.data(rename_side=state[2])
2063 2064 fm_extras = fm_files.nested(b'extras')
2064 2065 for k, v in ms.extras(f).items():
2065 2066 fm_extras.startitem()
2066 2067 fm_extras.data(key=k)
2067 2068 fm_extras.data(value=v)
2068 2069 fm_extras.end()
2069 2070
2070 2071 fm_files.end()
2071 2072
2073 fm_extras = fm.nested(b'extras')
2074 for f, d in sorted(pycompat.iteritems(ms._stateextras)):
2075 if f in ms:
2076             # If the file is in the mergestate, we have already processed its extras
2077 continue
2078 for k, v in pycompat.iteritems(d):
2079 fm_extras.startitem()
2080 fm_extras.data(file=f)
2081 fm_extras.data(key=k)
2082 fm_extras.data(value=v)
2083 fm_extras.end()
2084
2072 2085 fm.end()
2073 2086
2074 2087
2075 2088 @command(b'debugnamecomplete', [], _(b'NAME...'))
2076 2089 def debugnamecomplete(ui, repo, *args):
2077 2090 '''complete "names" - tags, open branch names, bookmark names'''
2078 2091
2079 2092 names = set()
2080 2093 # since we previously only listed open branches, we will handle that
2081 2094 # specially (after this for loop)
2082 2095 for name, ns in pycompat.iteritems(repo.names):
2083 2096 if name != b'branches':
2084 2097 names.update(ns.listnames(repo))
2085 2098 names.update(
2086 2099 tag
2087 2100 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2088 2101 if not closed
2089 2102 )
2090 2103 completions = set()
2091 2104 if not args:
2092 2105 args = [b'']
2093 2106 for a in args:
2094 2107 completions.update(n for n in names if n.startswith(a))
2095 2108 ui.write(b'\n'.join(sorted(completions)))
2096 2109 ui.write(b'\n')
2097 2110
2098 2111
2099 2112 @command(
2100 2113 b'debugnodemap',
2101 2114 [
2102 2115 (
2103 2116 b'',
2104 2117 b'dump-new',
2105 2118 False,
2106 2119             _(b'write a (new) persistent binary nodemap on stdout'),
2107 2120 ),
2108 2121         (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2109 2122 (
2110 2123 b'',
2111 2124 b'check',
2112 2125 False,
2113 2126             _(b'check that the data on disk are correct.'),
2114 2127 ),
2115 2128 (
2116 2129 b'',
2117 2130 b'metadata',
2118 2131 False,
2119 2132 _(b'display the on disk meta data for the nodemap'),
2120 2133 ),
2121 2134 ],
2122 2135 )
2123 2136 def debugnodemap(ui, repo, **opts):
2124 2137 """write and inspect on disk nodemap
2125 2138 """
2126 2139 if opts['dump_new']:
2127 2140 unfi = repo.unfiltered()
2128 2141 cl = unfi.changelog
2129 2142 if util.safehasattr(cl.index, "nodemap_data_all"):
2130 2143 data = cl.index.nodemap_data_all()
2131 2144 else:
2132 2145 data = nodemap.persistent_data(cl.index)
2133 2146 ui.write(data)
2134 2147 elif opts['dump_disk']:
2135 2148 unfi = repo.unfiltered()
2136 2149 cl = unfi.changelog
2137 2150 nm_data = nodemap.persisted_data(cl)
2138 2151 if nm_data is not None:
2139 2152 docket, data = nm_data
2140 2153 ui.write(data[:])
2141 2154 elif opts['check']:
2142 2155 unfi = repo.unfiltered()
2143 2156 cl = unfi.changelog
2144 2157 nm_data = nodemap.persisted_data(cl)
2145 2158 if nm_data is not None:
2146 2159 docket, data = nm_data
2147 2160 return nodemap.check_data(ui, cl.index, data)
2148 2161 elif opts['metadata']:
2149 2162 unfi = repo.unfiltered()
2150 2163 cl = unfi.changelog
2151 2164 nm_data = nodemap.persisted_data(cl)
2152 2165 if nm_data is not None:
2153 2166 docket, data = nm_data
2154 2167 ui.write((b"uid: %s\n") % docket.uid)
2155 2168 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2156 2169 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2157 2170 ui.write((b"data-length: %d\n") % docket.data_length)
2158 2171 ui.write((b"data-unused: %d\n") % docket.data_unused)
2159 2172 unused_perc = docket.data_unused * 100.0 / docket.data_length
2160 2173 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2161 2174
2162 2175
2163 2176 @command(
2164 2177 b'debugobsolete',
2165 2178 [
2166 2179 (b'', b'flags', 0, _(b'markers flag')),
2167 2180 (
2168 2181 b'',
2169 2182 b'record-parents',
2170 2183 False,
2171 2184 _(b'record parent information for the precursor'),
2172 2185 ),
2173 2186 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2174 2187 (
2175 2188 b'',
2176 2189 b'exclusive',
2177 2190 False,
2178 2191 _(b'restrict display to markers only relevant to REV'),
2179 2192 ),
2180 2193 (b'', b'index', False, _(b'display index of the marker')),
2181 2194 (b'', b'delete', [], _(b'delete markers specified by indices')),
2182 2195 ]
2183 2196 + cmdutil.commitopts2
2184 2197 + cmdutil.formatteropts,
2185 2198 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2186 2199 )
2187 2200 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2188 2201 """create arbitrary obsolete marker
2189 2202
2190 2203 With no arguments, displays the list of obsolescence markers."""
2191 2204
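    # Illustrative invocation (node ids are made-up): `hg debugobsolete OLDNODE
    # NEWNODE` records a marker stating that OLDNODE was superseded by NEWNODE;
    # `hg debugobsolete --delete 0` removes the marker shown at index 0 by
    # --index.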
2192 2205 opts = pycompat.byteskwargs(opts)
2193 2206
2194 2207 def parsenodeid(s):
2195 2208 try:
2196 2209 # We do not use revsingle/revrange functions here to accept
2197 2210 # arbitrary node identifiers, possibly not present in the
2198 2211 # local repository.
2199 2212 n = bin(s)
2200 2213 if len(n) != len(nullid):
2201 2214 raise TypeError()
2202 2215 return n
2203 2216 except TypeError:
2204 2217 raise error.Abort(
2205 2218 b'changeset references must be full hexadecimal '
2206 2219 b'node identifiers'
2207 2220 )
2208 2221
2209 2222 if opts.get(b'delete'):
2210 2223 indices = []
2211 2224 for v in opts.get(b'delete'):
2212 2225 try:
2213 2226 indices.append(int(v))
2214 2227 except ValueError:
2215 2228 raise error.Abort(
2216 2229 _(b'invalid index value: %r') % v,
2217 2230 hint=_(b'use integers for indices'),
2218 2231 )
2219 2232
2220 2233 if repo.currenttransaction():
2221 2234 raise error.Abort(
2222 2235                 _(b'cannot delete obsmarkers in the middle of a transaction.')
2223 2236 )
2224 2237
2225 2238 with repo.lock():
2226 2239 n = repair.deleteobsmarkers(repo.obsstore, indices)
2227 2240 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2228 2241
2229 2242 return
2230 2243
2231 2244 if precursor is not None:
2232 2245 if opts[b'rev']:
2233 2246 raise error.Abort(b'cannot select revision when creating marker')
2234 2247 metadata = {}
2235 2248 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2236 2249 succs = tuple(parsenodeid(succ) for succ in successors)
2237 2250 l = repo.lock()
2238 2251 try:
2239 2252 tr = repo.transaction(b'debugobsolete')
2240 2253 try:
2241 2254 date = opts.get(b'date')
2242 2255 if date:
2243 2256 date = dateutil.parsedate(date)
2244 2257 else:
2245 2258 date = None
2246 2259 prec = parsenodeid(precursor)
2247 2260 parents = None
2248 2261 if opts[b'record_parents']:
2249 2262 if prec not in repo.unfiltered():
2250 2263 raise error.Abort(
2251 2264                         b'cannot use --record-parents on '
2252 2265                         b'unknown changesets'
2253 2266 )
2254 2267 parents = repo.unfiltered()[prec].parents()
2255 2268 parents = tuple(p.node() for p in parents)
2256 2269 repo.obsstore.create(
2257 2270 tr,
2258 2271 prec,
2259 2272 succs,
2260 2273 opts[b'flags'],
2261 2274 parents=parents,
2262 2275 date=date,
2263 2276 metadata=metadata,
2264 2277 ui=ui,
2265 2278 )
2266 2279 tr.close()
2267 2280 except ValueError as exc:
2268 2281 raise error.Abort(
2269 2282 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2270 2283 )
2271 2284 finally:
2272 2285 tr.release()
2273 2286 finally:
2274 2287 l.release()
2275 2288 else:
2276 2289 if opts[b'rev']:
2277 2290 revs = scmutil.revrange(repo, opts[b'rev'])
2278 2291 nodes = [repo[r].node() for r in revs]
2279 2292 markers = list(
2280 2293 obsutil.getmarkers(
2281 2294 repo, nodes=nodes, exclusive=opts[b'exclusive']
2282 2295 )
2283 2296 )
2284 2297 markers.sort(key=lambda x: x._data)
2285 2298 else:
2286 2299 markers = obsutil.getmarkers(repo)
2287 2300
2288 2301 markerstoiter = markers
2289 2302 isrelevant = lambda m: True
2290 2303 if opts.get(b'rev') and opts.get(b'index'):
2291 2304 markerstoiter = obsutil.getmarkers(repo)
2292 2305 markerset = set(markers)
2293 2306 isrelevant = lambda m: m in markerset
2294 2307
2295 2308 fm = ui.formatter(b'debugobsolete', opts)
2296 2309 for i, m in enumerate(markerstoiter):
2297 2310 if not isrelevant(m):
2298 2311 # marker can be irrelevant when we're iterating over a set
2299 2312 # of markers (markerstoiter) which is bigger than the set
2300 2313 # of markers we want to display (markers)
2301 2314 # this can happen if both --index and --rev options are
2302 2315 # provided and thus we need to iterate over all of the markers
2303 2316 # to get the correct indices, but only display the ones that
2304 2317 # are relevant to --rev value
2305 2318 continue
2306 2319 fm.startitem()
2307 2320 ind = i if opts.get(b'index') else None
2308 2321 cmdutil.showmarker(fm, m, index=ind)
2309 2322 fm.end()
2310 2323
2311 2324
2312 2325 @command(
2313 2326 b'debugp1copies',
2314 2327 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2315 2328 _(b'[-r REV]'),
2316 2329 )
2317 2330 def debugp1copies(ui, repo, **opts):
2318 2331 """dump copy information compared to p1"""
2319 2332
2320 2333 opts = pycompat.byteskwargs(opts)
2321 2334 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2322 2335 for dst, src in ctx.p1copies().items():
2323 2336 ui.write(b'%s -> %s\n' % (src, dst))
2324 2337
2325 2338
2326 2339 @command(
2327 2340 b'debugp2copies',
2328 2341 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2329 2342 _(b'[-r REV]'),
2330 2343 )
2331 2344 def debugp2copies(ui, repo, **opts):
2332 2345 """dump copy information compared to p2"""
2333 2346
2334 2347 opts = pycompat.byteskwargs(opts)
2335 2348 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2336 2349 for dst, src in ctx.p2copies().items():
2337 2350 ui.write(b'%s -> %s\n' % (src, dst))
2338 2351
2339 2352
2340 2353 @command(
2341 2354 b'debugpathcomplete',
2342 2355 [
2343 2356 (b'f', b'full', None, _(b'complete an entire path')),
2344 2357 (b'n', b'normal', None, _(b'show only normal files')),
2345 2358 (b'a', b'added', None, _(b'show only added files')),
2346 2359 (b'r', b'removed', None, _(b'show only removed files')),
2347 2360 ],
2348 2361 _(b'FILESPEC...'),
2349 2362 )
2350 2363 def debugpathcomplete(ui, repo, *specs, **opts):
2351 2364 '''complete part or all of a tracked path
2352 2365
2353 2366 This command supports shells that offer path name completion. It
2354 2367 currently completes only files already known to the dirstate.
2355 2368
2356 2369 Completion extends only to the next path segment unless
2357 2370 --full is specified, in which case entire paths are used.'''
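    # Illustrative completions (paths are made-up): with tracked files
    # "src/a.c" and "src/b.c", `hg debugpathcomplete src/` prints both files,
    # while `hg debugpathcomplete s` without --full stops at the "src" segment.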
2358 2371
2359 2372 def complete(path, acceptable):
2360 2373 dirstate = repo.dirstate
2361 2374 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2362 2375 rootdir = repo.root + pycompat.ossep
2363 2376 if spec != repo.root and not spec.startswith(rootdir):
2364 2377 return [], []
2365 2378 if os.path.isdir(spec):
2366 2379 spec += b'/'
2367 2380 spec = spec[len(rootdir) :]
2368 2381 fixpaths = pycompat.ossep != b'/'
2369 2382 if fixpaths:
2370 2383 spec = spec.replace(pycompat.ossep, b'/')
2371 2384 speclen = len(spec)
2372 2385 fullpaths = opts['full']
2373 2386 files, dirs = set(), set()
2374 2387 adddir, addfile = dirs.add, files.add
2375 2388 for f, st in pycompat.iteritems(dirstate):
2376 2389 if f.startswith(spec) and st[0] in acceptable:
2377 2390 if fixpaths:
2378 2391 f = f.replace(b'/', pycompat.ossep)
2379 2392 if fullpaths:
2380 2393 addfile(f)
2381 2394 continue
2382 2395 s = f.find(pycompat.ossep, speclen)
2383 2396 if s >= 0:
2384 2397 adddir(f[:s])
2385 2398 else:
2386 2399 addfile(f)
2387 2400 return files, dirs
2388 2401
2389 2402 acceptable = b''
2390 2403 if opts['normal']:
2391 2404 acceptable += b'nm'
2392 2405 if opts['added']:
2393 2406 acceptable += b'a'
2394 2407 if opts['removed']:
2395 2408 acceptable += b'r'
2396 2409 cwd = repo.getcwd()
2397 2410 if not specs:
2398 2411 specs = [b'.']
2399 2412
2400 2413 files, dirs = set(), set()
2401 2414 for spec in specs:
2402 2415 f, d = complete(spec, acceptable or b'nmar')
2403 2416 files.update(f)
2404 2417 dirs.update(d)
2405 2418 files.update(dirs)
2406 2419 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2407 2420 ui.write(b'\n')
2408 2421
2409 2422
2410 2423 @command(
2411 2424 b'debugpathcopies',
2412 2425 cmdutil.walkopts,
2413 2426 b'hg debugpathcopies REV1 REV2 [FILE]',
2414 2427 inferrepo=True,
2415 2428 )
2416 2429 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2417 2430 """show copies between two revisions"""
2418 2431 ctx1 = scmutil.revsingle(repo, rev1)
2419 2432 ctx2 = scmutil.revsingle(repo, rev2)
2420 2433 m = scmutil.match(ctx1, pats, opts)
2421 2434 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2422 2435 ui.write(b'%s -> %s\n' % (src, dst))
2423 2436
2424 2437
2425 2438 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2426 2439 def debugpeer(ui, path):
2427 2440 """establish a connection to a peer repository"""
2428 2441 # Always enable peer request logging. Requires --debug to display
2429 2442 # though.
2430 2443 overrides = {
2431 2444 (b'devel', b'debug.peer-request'): True,
2432 2445 }
2433 2446
2434 2447 with ui.configoverride(overrides):
2435 2448 peer = hg.peer(ui, {}, path)
2436 2449
2437 2450 local = peer.local() is not None
2438 2451 canpush = peer.canpush()
2439 2452
2440 2453 ui.write(_(b'url: %s\n') % peer.url())
2441 2454 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2442 2455 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2443 2456
2444 2457
2445 2458 @command(
2446 2459 b'debugpickmergetool',
2447 2460 [
2448 2461 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2449 2462 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2450 2463 ]
2451 2464 + cmdutil.walkopts
2452 2465 + cmdutil.mergetoolopts,
2453 2466 _(b'[PATTERN]...'),
2454 2467 inferrepo=True,
2455 2468 )
2456 2469 def debugpickmergetool(ui, repo, *pats, **opts):
2457 2470 """examine which merge tool is chosen for specified file
2458 2471
2459 2472 As described in :hg:`help merge-tools`, Mercurial examines
2460 2473 configurations below in this order to decide which merge tool is
2461 2474 chosen for specified file.
2462 2475
2463 2476 1. ``--tool`` option
2464 2477 2. ``HGMERGE`` environment variable
2465 2478 3. configurations in ``merge-patterns`` section
2466 2479 4. configuration of ``ui.merge``
2467 2480 5. configurations in ``merge-tools`` section
2468 2481     6. ``hgmerge`` tool (for historical reasons only)
2469 2482 7. default tool for fallback (``:merge`` or ``:prompt``)
2470 2483
2471 2484 This command writes out examination result in the style below::
2472 2485
2473 2486 FILE = MERGETOOL
2474 2487
2475 2488 By default, all files known in the first parent context of the
2476 2489 working directory are examined. Use file patterns and/or -I/-X
2477 2490 options to limit target files. -r/--rev is also useful to examine
2478 2491 files in another context without actual updating to it.
2479 2492
2480 2493     With --debug, this command also shows the warning messages emitted
2481 2494     while matching against ``merge-patterns`` and so on. It is recommended
2482 2495     to use this option with explicit file patterns and/or -I/-X options,
2483 2496     because it increases the amount of output per file according to the
2484 2497     configuration in hgrc.
2485 2498
2486 2499     With -v/--verbose, this command first shows the configurations
2487 2500     below (only if they are specified).
2488 2501
2489 2502 - ``--tool`` option
2490 2503 - ``HGMERGE`` environment variable
2491 2504 - configuration of ``ui.merge``
2492 2505
2493 2506     If a merge tool is chosen before matching against
2494 2507     ``merge-patterns``, this command can't show any helpful
2495 2508     information, even with --debug. In such a case, the information
2496 2509     above is useful for understanding why that merge tool was chosen.
2497 2510 """
2498 2511 opts = pycompat.byteskwargs(opts)
2499 2512 overrides = {}
2500 2513 if opts[b'tool']:
2501 2514 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2502 2515 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2503 2516
2504 2517 with ui.configoverride(overrides, b'debugmergepatterns'):
2505 2518 hgmerge = encoding.environ.get(b"HGMERGE")
2506 2519 if hgmerge is not None:
2507 2520 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2508 2521 uimerge = ui.config(b"ui", b"merge")
2509 2522 if uimerge:
2510 2523 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2511 2524
2512 2525 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2513 2526 m = scmutil.match(ctx, pats, opts)
2514 2527 changedelete = opts[b'changedelete']
2515 2528 for path in ctx.walk(m):
2516 2529 fctx = ctx[path]
2517 2530 try:
2518 2531 if not ui.debugflag:
2519 2532 ui.pushbuffer(error=True)
2520 2533 tool, toolpath = filemerge._picktool(
2521 2534 repo,
2522 2535 ui,
2523 2536 path,
2524 2537 fctx.isbinary(),
2525 2538 b'l' in fctx.flags(),
2526 2539 changedelete,
2527 2540 )
2528 2541 finally:
2529 2542 if not ui.debugflag:
2530 2543 ui.popbuffer()
2531 2544 ui.write(b'%s = %s\n' % (path, tool))
2532 2545
2533 2546
2534 2547 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2535 2548 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2536 2549 '''access the pushkey key/value protocol
2537 2550
2538 2551 With two args, list the keys in the given namespace.
2539 2552
2540 2553 With five args, set a key to new if it currently is set to old.
2541 2554 Reports success or failure.
2542 2555 '''
2543 2556
2544 2557 target = hg.peer(ui, {}, repopath)
2545 2558 if keyinfo:
2546 2559 key, old, new = keyinfo
2547 2560 with target.commandexecutor() as e:
2548 2561 r = e.callcommand(
2549 2562 b'pushkey',
2550 2563 {
2551 2564 b'namespace': namespace,
2552 2565 b'key': key,
2553 2566 b'old': old,
2554 2567 b'new': new,
2555 2568 },
2556 2569 ).result()
2557 2570
2558 2571 ui.status(pycompat.bytestr(r) + b'\n')
2559 2572 return not r
2560 2573 else:
2561 2574 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2562 2575 ui.write(
2563 2576 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2564 2577 )
2565 2578
2566 2579
2567 2580 @command(b'debugpvec', [], _(b'A B'))
2568 2581 def debugpvec(ui, repo, a, b=None):
2569 2582 ca = scmutil.revsingle(repo, a)
2570 2583 cb = scmutil.revsingle(repo, b)
2571 2584 pa = pvec.ctxpvec(ca)
2572 2585 pb = pvec.ctxpvec(cb)
2573 2586 if pa == pb:
2574 2587 rel = b"="
2575 2588 elif pa > pb:
2576 2589 rel = b">"
2577 2590 elif pa < pb:
2578 2591 rel = b"<"
2579 2592 elif pa | pb:
2580 2593 rel = b"|"
2581 2594 ui.write(_(b"a: %s\n") % pa)
2582 2595 ui.write(_(b"b: %s\n") % pb)
2583 2596 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2584 2597 ui.write(
2585 2598 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2586 2599 % (
2587 2600 abs(pa._depth - pb._depth),
2588 2601 pvec._hamming(pa._vec, pb._vec),
2589 2602 pa.distance(pb),
2590 2603 rel,
2591 2604 )
2592 2605 )
2593 2606
2594 2607
2595 2608 @command(
2596 2609 b'debugrebuilddirstate|debugrebuildstate',
2597 2610 [
2598 2611 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2599 2612 (
2600 2613 b'',
2601 2614 b'minimal',
2602 2615 None,
2603 2616 _(
2604 2617 b'only rebuild files that are inconsistent with '
2605 2618 b'the working copy parent'
2606 2619 ),
2607 2620 ),
2608 2621 ],
2609 2622 _(b'[-r REV]'),
2610 2623 )
2611 2624 def debugrebuilddirstate(ui, repo, rev, **opts):
2612 2625 """rebuild the dirstate as it would look like for the given revision
2613 2626
2614 2627     If no revision is specified, the first parent of the working directory will be used.
2615 2628
2616 2629 The dirstate will be set to the files of the given revision.
2617 2630 The actual working directory content or existing dirstate
2618 2631 information such as adds or removes is not considered.
2619 2632
2620 2633 ``minimal`` will only rebuild the dirstate status for files that claim to be
2621 2634 tracked but are not in the parent manifest, or that exist in the parent
2622 2635 manifest but are not in the dirstate. It will not change adds, removes, or
2623 2636 modified files that are in the working copy parent.
2624 2637
2625 2638 One use of this command is to make the next :hg:`status` invocation
2626 2639 check the actual file content.
2627 2640 """
2628 2641 ctx = scmutil.revsingle(repo, rev)
2629 2642 with repo.wlock():
2630 2643 dirstate = repo.dirstate
2631 2644 changedfiles = None
2632 2645 # See command doc for what minimal does.
2633 2646 if opts.get('minimal'):
2634 2647 manifestfiles = set(ctx.manifest().keys())
2635 2648 dirstatefiles = set(dirstate)
2636 2649 manifestonly = manifestfiles - dirstatefiles
2637 2650 dsonly = dirstatefiles - manifestfiles
2638 2651 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2639 2652 changedfiles = manifestonly | dsnotadded
2640 2653
2641 2654 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2642 2655
2643 2656
2644 2657 @command(b'debugrebuildfncache', [], b'')
2645 2658 def debugrebuildfncache(ui, repo):
2646 2659 """rebuild the fncache file"""
2647 2660 repair.rebuildfncache(ui, repo)
2648 2661
2649 2662
2650 2663 @command(
2651 2664 b'debugrename',
2652 2665 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2653 2666 _(b'[-r REV] [FILE]...'),
2654 2667 )
2655 2668 def debugrename(ui, repo, *pats, **opts):
2656 2669 """dump rename information"""
2657 2670
2658 2671 opts = pycompat.byteskwargs(opts)
2659 2672 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2660 2673 m = scmutil.match(ctx, pats, opts)
2661 2674 for abs in ctx.walk(m):
2662 2675 fctx = ctx[abs]
2663 2676 o = fctx.filelog().renamed(fctx.filenode())
2664 2677 rel = repo.pathto(abs)
2665 2678 if o:
2666 2679 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2667 2680 else:
2668 2681 ui.write(_(b"%s not renamed\n") % rel)
2669 2682
2670 2683
2671 2684 @command(b'debugrequires|debugrequirements', [], b'')
2672 2685 def debugrequirements(ui, repo):
2673 2686 """ print the current repo requirements """
2674 2687 for r in sorted(repo.requirements):
2675 2688 ui.write(b"%s\n" % r)
2676 2689
2677 2690
2678 2691 @command(
2679 2692 b'debugrevlog',
2680 2693 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2681 2694 _(b'-c|-m|FILE'),
2682 2695 optionalrepo=True,
2683 2696 )
2684 2697 def debugrevlog(ui, repo, file_=None, **opts):
2685 2698 """show data and statistics about a revlog"""
2686 2699 opts = pycompat.byteskwargs(opts)
2687 2700 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2688 2701
2689 2702 if opts.get(b"dump"):
2690 2703 numrevs = len(r)
2691 2704 ui.write(
2692 2705 (
2693 2706 b"# rev p1rev p2rev start end deltastart base p1 p2"
2694 2707 b" rawsize totalsize compression heads chainlen\n"
2695 2708 )
2696 2709 )
2697 2710 ts = 0
2698 2711 heads = set()
2699 2712
2700 2713 for rev in pycompat.xrange(numrevs):
2701 2714 dbase = r.deltaparent(rev)
2702 2715 if dbase == -1:
2703 2716 dbase = rev
2704 2717 cbase = r.chainbase(rev)
2705 2718 clen = r.chainlen(rev)
2706 2719 p1, p2 = r.parentrevs(rev)
2707 2720 rs = r.rawsize(rev)
2708 2721 ts = ts + rs
2709 2722 heads -= set(r.parentrevs(rev))
2710 2723 heads.add(rev)
2711 2724 try:
2712 2725 compression = ts / r.end(rev)
2713 2726 except ZeroDivisionError:
2714 2727 compression = 0
2715 2728 ui.write(
2716 2729 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2717 2730 b"%11d %5d %8d\n"
2718 2731 % (
2719 2732 rev,
2720 2733 p1,
2721 2734 p2,
2722 2735 r.start(rev),
2723 2736 r.end(rev),
2724 2737 r.start(dbase),
2725 2738 r.start(cbase),
2726 2739 r.start(p1),
2727 2740 r.start(p2),
2728 2741 rs,
2729 2742 ts,
2730 2743 compression,
2731 2744 len(heads),
2732 2745 clen,
2733 2746 )
2734 2747 )
2735 2748 return 0
2736 2749
2737 2750 v = r.version
2738 2751 format = v & 0xFFFF
2739 2752 flags = []
2740 2753 gdelta = False
2741 2754 if v & revlog.FLAG_INLINE_DATA:
2742 2755 flags.append(b'inline')
2743 2756 if v & revlog.FLAG_GENERALDELTA:
2744 2757 gdelta = True
2745 2758 flags.append(b'generaldelta')
2746 2759 if not flags:
2747 2760 flags = [b'(none)']
2748 2761
2749 2762 ### tracks merge vs single parent
2750 2763 nummerges = 0
2751 2764
2752 2765 ### tracks ways the "delta" are build
2753 2766 # nodelta
2754 2767 numempty = 0
2755 2768 numemptytext = 0
2756 2769 numemptydelta = 0
2757 2770 # full file content
2758 2771 numfull = 0
2759 2772 # intermediate snapshot against a prior snapshot
2760 2773 numsemi = 0
2761 2774 # snapshot count per depth
2762 2775 numsnapdepth = collections.defaultdict(lambda: 0)
2763 2776 # delta against previous revision
2764 2777 numprev = 0
2765 2778 # delta against first or second parent (not prev)
2766 2779 nump1 = 0
2767 2780 nump2 = 0
2768 2781 # delta against neither prev nor parents
2769 2782 numother = 0
2770 2783 # delta against prev that are also first or second parent
2771 2784 # (details of `numprev`)
2772 2785 nump1prev = 0
2773 2786 nump2prev = 0
2774 2787
2775 2788 # data about delta chain of each revs
2776 2789 chainlengths = []
2777 2790 chainbases = []
2778 2791 chainspans = []
2779 2792
2780 2793 # data about each revision
2781 2794 datasize = [None, 0, 0]
2782 2795 fullsize = [None, 0, 0]
2783 2796 semisize = [None, 0, 0]
2784 2797 # snapshot count per depth
2785 2798 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2786 2799 deltasize = [None, 0, 0]
2787 2800 chunktypecounts = {}
2788 2801 chunktypesizes = {}
2789 2802
2790 2803 def addsize(size, l):
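        # l is a running [min, max, total] triple; the totals are turned into
        # averages further down once the revision counts are known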
2791 2804 if l[0] is None or size < l[0]:
2792 2805 l[0] = size
2793 2806 if size > l[1]:
2794 2807 l[1] = size
2795 2808 l[2] += size
2796 2809
2797 2810 numrevs = len(r)
2798 2811 for rev in pycompat.xrange(numrevs):
2799 2812 p1, p2 = r.parentrevs(rev)
2800 2813 delta = r.deltaparent(rev)
2801 2814 if format > 0:
2802 2815 addsize(r.rawsize(rev), datasize)
2803 2816 if p2 != nullrev:
2804 2817 nummerges += 1
2805 2818 size = r.length(rev)
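        # classify the revision: an empty text/delta, a full snapshot, an
        # intermediate snapshot, or a delta against prev/p1/p2/another revision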
2806 2819 if delta == nullrev:
2807 2820 chainlengths.append(0)
2808 2821 chainbases.append(r.start(rev))
2809 2822 chainspans.append(size)
2810 2823 if size == 0:
2811 2824 numempty += 1
2812 2825 numemptytext += 1
2813 2826 else:
2814 2827 numfull += 1
2815 2828 numsnapdepth[0] += 1
2816 2829 addsize(size, fullsize)
2817 2830 addsize(size, snapsizedepth[0])
2818 2831 else:
2819 2832 chainlengths.append(chainlengths[delta] + 1)
2820 2833 baseaddr = chainbases[delta]
2821 2834 revaddr = r.start(rev)
2822 2835 chainbases.append(baseaddr)
2823 2836 chainspans.append((revaddr - baseaddr) + size)
2824 2837 if size == 0:
2825 2838 numempty += 1
2826 2839 numemptydelta += 1
2827 2840 elif r.issnapshot(rev):
2828 2841 addsize(size, semisize)
2829 2842 numsemi += 1
2830 2843 depth = r.snapshotdepth(rev)
2831 2844 numsnapdepth[depth] += 1
2832 2845 addsize(size, snapsizedepth[depth])
2833 2846 else:
2834 2847 addsize(size, deltasize)
2835 2848 if delta == rev - 1:
2836 2849 numprev += 1
2837 2850 if delta == p1:
2838 2851 nump1prev += 1
2839 2852 elif delta == p2:
2840 2853 nump2prev += 1
2841 2854 elif delta == p1:
2842 2855 nump1 += 1
2843 2856 elif delta == p2:
2844 2857 nump2 += 1
2845 2858 elif delta != nullrev:
2846 2859 numother += 1
2847 2860
2848 2861 # Obtain data on the raw chunks in the revlog.
2849 2862 if util.safehasattr(r, b'_getsegmentforrevs'):
2850 2863 segment = r._getsegmentforrevs(rev, rev)[1]
2851 2864 else:
2852 2865 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2853 2866 if segment:
2854 2867 chunktype = bytes(segment[0:1])
2855 2868 else:
2856 2869 chunktype = b'empty'
2857 2870
2858 2871 if chunktype not in chunktypecounts:
2859 2872 chunktypecounts[chunktype] = 0
2860 2873 chunktypesizes[chunktype] = 0
2861 2874
2862 2875 chunktypecounts[chunktype] += 1
2863 2876 chunktypesizes[chunktype] += size
2864 2877
2865 2878 # Adjust size min value for empty cases
2866 2879 for size in (datasize, fullsize, semisize, deltasize):
2867 2880 if size[0] is None:
2868 2881 size[0] = 0
2869 2882
2870 2883 numdeltas = numrevs - numfull - numempty - numsemi
2871 2884 numoprev = numprev - nump1prev - nump2prev
2872 2885 totalrawsize = datasize[2]
2873 2886 datasize[2] /= numrevs
2874 2887 fulltotal = fullsize[2]
2875 2888 if numfull == 0:
2876 2889 fullsize[2] = 0
2877 2890 else:
2878 2891 fullsize[2] /= numfull
2879 2892 semitotal = semisize[2]
2880 2893 snaptotal = {}
2881 2894 if numsemi > 0:
2882 2895 semisize[2] /= numsemi
2883 2896 for depth in snapsizedepth:
2884 2897 snaptotal[depth] = snapsizedepth[depth][2]
2885 2898 snapsizedepth[depth][2] /= numsnapdepth[depth]
2886 2899
2887 2900 deltatotal = deltasize[2]
2888 2901 if numdeltas > 0:
2889 2902 deltasize[2] /= numdeltas
2890 2903 totalsize = fulltotal + semitotal + deltatotal
2891 2904 avgchainlen = sum(chainlengths) / numrevs
2892 2905 maxchainlen = max(chainlengths)
2893 2906 maxchainspan = max(chainspans)
2894 2907 compratio = 1
2895 2908 if totalsize:
2896 2909 compratio = totalrawsize / totalsize
2897 2910
2898 2911 basedfmtstr = b'%%%dd\n'
2899 2912 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2900 2913
2901 2914 def dfmtstr(max):
2902 2915 return basedfmtstr % len(str(max))
2903 2916
2904 2917 def pcfmtstr(max, padding=0):
2905 2918 return basepcfmtstr % (len(str(max)), b' ' * padding)
2906 2919
2907 2920 def pcfmt(value, total):
2908 2921 if total:
2909 2922 return (value, 100 * float(value) / total)
2910 2923 else:
2911 2924 return value, 100.0
2912 2925
2913 2926 ui.writenoi18n(b'format : %d\n' % format)
2914 2927 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2915 2928
2916 2929 ui.write(b'\n')
2917 2930 fmt = pcfmtstr(totalsize)
2918 2931 fmt2 = dfmtstr(totalsize)
2919 2932 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2920 2933 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2921 2934 ui.writenoi18n(
2922 2935 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2923 2936 )
2924 2937 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2925 2938 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2926 2939 ui.writenoi18n(
2927 2940 b' text : '
2928 2941 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2929 2942 )
2930 2943 ui.writenoi18n(
2931 2944 b' delta : '
2932 2945 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2933 2946 )
2934 2947 ui.writenoi18n(
2935 2948 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2936 2949 )
2937 2950 for depth in sorted(numsnapdepth):
2938 2951 ui.write(
2939 2952 (b' lvl-%-3d : ' % depth)
2940 2953 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2941 2954 )
2942 2955 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2943 2956 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2944 2957 ui.writenoi18n(
2945 2958 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2946 2959 )
2947 2960 for depth in sorted(numsnapdepth):
2948 2961 ui.write(
2949 2962 (b' lvl-%-3d : ' % depth)
2950 2963 + fmt % pcfmt(snaptotal[depth], totalsize)
2951 2964 )
2952 2965 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2953 2966
2954 2967 def fmtchunktype(chunktype):
2955 2968 if chunktype == b'empty':
2956 2969 return b' %s : ' % chunktype
2957 2970 elif chunktype in pycompat.bytestr(string.ascii_letters):
2958 2971 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2959 2972 else:
2960 2973 return b' 0x%s : ' % hex(chunktype)
2961 2974
2962 2975 ui.write(b'\n')
2963 2976 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2964 2977 for chunktype in sorted(chunktypecounts):
2965 2978 ui.write(fmtchunktype(chunktype))
2966 2979 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2967 2980 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2968 2981 for chunktype in sorted(chunktypecounts):
2969 2982 ui.write(fmtchunktype(chunktype))
2970 2983 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2971 2984
2972 2985 ui.write(b'\n')
2973 2986 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2974 2987 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2975 2988 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2976 2989 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2977 2990 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2978 2991
2979 2992 if format > 0:
2980 2993 ui.write(b'\n')
2981 2994 ui.writenoi18n(
2982 2995 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2983 2996 % tuple(datasize)
2984 2997 )
2985 2998 ui.writenoi18n(
2986 2999 b'full revision size (min/max/avg) : %d / %d / %d\n'
2987 3000 % tuple(fullsize)
2988 3001 )
2989 3002 ui.writenoi18n(
2990 3003 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2991 3004 % tuple(semisize)
2992 3005 )
2993 3006 for depth in sorted(snapsizedepth):
2994 3007 if depth == 0:
2995 3008 continue
2996 3009 ui.writenoi18n(
2997 3010 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2998 3011 % ((depth,) + tuple(snapsizedepth[depth]))
2999 3012 )
3000 3013 ui.writenoi18n(
3001 3014 b'delta size (min/max/avg) : %d / %d / %d\n'
3002 3015 % tuple(deltasize)
3003 3016 )
3004 3017
3005 3018 if numdeltas > 0:
3006 3019 ui.write(b'\n')
3007 3020 fmt = pcfmtstr(numdeltas)
3008 3021 fmt2 = pcfmtstr(numdeltas, 4)
3009 3022 ui.writenoi18n(
3010 3023 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3011 3024 )
3012 3025 if numprev > 0:
3013 3026 ui.writenoi18n(
3014 3027 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3015 3028 )
3016 3029 ui.writenoi18n(
3017 3030 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3018 3031 )
3019 3032 ui.writenoi18n(
3020 3033 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3021 3034 )
3022 3035 if gdelta:
3023 3036 ui.writenoi18n(
3024 3037 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3025 3038 )
3026 3039 ui.writenoi18n(
3027 3040 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3028 3041 )
3029 3042 ui.writenoi18n(
3030 3043 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3031 3044 )
3032 3045
3033 3046
3034 3047 @command(
3035 3048 b'debugrevlogindex',
3036 3049 cmdutil.debugrevlogopts
3037 3050 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3038 3051 _(b'[-f FORMAT] -c|-m|FILE'),
3039 3052 optionalrepo=True,
3040 3053 )
3041 3054 def debugrevlogindex(ui, repo, file_=None, **opts):
3042 3055 """dump the contents of a revlog index"""
3043 3056 opts = pycompat.byteskwargs(opts)
3044 3057 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3045 3058 format = opts.get(b'format', 0)
3046 3059 if format not in (0, 1):
3047 3060 raise error.Abort(_(b"unknown format %d") % format)
3048 3061
3049 3062 if ui.debugflag:
3050 3063 shortfn = hex
3051 3064 else:
3052 3065 shortfn = short
3053 3066
3054 3067 # There might not be anything in r, so have a sane default
3055 3068 idlen = 12
3056 3069 for i in r:
3057 3070 idlen = len(shortfn(r.node(i)))
3058 3071 break
3059 3072
3060 3073 if format == 0:
3061 3074 if ui.verbose:
3062 3075 ui.writenoi18n(
3063 3076 b" rev offset length linkrev %s %s p2\n"
3064 3077 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3065 3078 )
3066 3079 else:
3067 3080 ui.writenoi18n(
3068 3081 b" rev linkrev %s %s p2\n"
3069 3082 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3070 3083 )
3071 3084 elif format == 1:
3072 3085 if ui.verbose:
3073 3086 ui.writenoi18n(
3074 3087 (
3075 3088 b" rev flag offset length size link p1"
3076 3089 b" p2 %s\n"
3077 3090 )
3078 3091 % b"nodeid".rjust(idlen)
3079 3092 )
3080 3093 else:
3081 3094 ui.writenoi18n(
3082 3095 b" rev flag size link p1 p2 %s\n"
3083 3096 % b"nodeid".rjust(idlen)
3084 3097 )
3085 3098
3086 3099 for i in r:
3087 3100 node = r.node(i)
3088 3101 if format == 0:
3089 3102 try:
3090 3103 pp = r.parents(node)
3091 3104 except Exception:
3092 3105 pp = [nullid, nullid]
3093 3106 if ui.verbose:
3094 3107 ui.write(
3095 3108 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3096 3109 % (
3097 3110 i,
3098 3111 r.start(i),
3099 3112 r.length(i),
3100 3113 r.linkrev(i),
3101 3114 shortfn(node),
3102 3115 shortfn(pp[0]),
3103 3116 shortfn(pp[1]),
3104 3117 )
3105 3118 )
3106 3119 else:
3107 3120 ui.write(
3108 3121 b"% 6d % 7d %s %s %s\n"
3109 3122 % (
3110 3123 i,
3111 3124 r.linkrev(i),
3112 3125 shortfn(node),
3113 3126 shortfn(pp[0]),
3114 3127 shortfn(pp[1]),
3115 3128 )
3116 3129 )
3117 3130 elif format == 1:
3118 3131 pr = r.parentrevs(i)
3119 3132 if ui.verbose:
3120 3133 ui.write(
3121 3134 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3122 3135 % (
3123 3136 i,
3124 3137 r.flags(i),
3125 3138 r.start(i),
3126 3139 r.length(i),
3127 3140 r.rawsize(i),
3128 3141 r.linkrev(i),
3129 3142 pr[0],
3130 3143 pr[1],
3131 3144 shortfn(node),
3132 3145 )
3133 3146 )
3134 3147 else:
3135 3148 ui.write(
3136 3149 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3137 3150 % (
3138 3151 i,
3139 3152 r.flags(i),
3140 3153 r.rawsize(i),
3141 3154 r.linkrev(i),
3142 3155 pr[0],
3143 3156 pr[1],
3144 3157 shortfn(node),
3145 3158 )
3146 3159 )
3147 3160
3148 3161
3149 3162 @command(
3150 3163 b'debugrevspec',
3151 3164 [
3152 3165 (
3153 3166 b'',
3154 3167 b'optimize',
3155 3168 None,
3156 3169 _(b'print parsed tree after optimizing (DEPRECATED)'),
3157 3170 ),
3158 3171 (
3159 3172 b'',
3160 3173 b'show-revs',
3161 3174 True,
3162 3175 _(b'print list of result revisions (default)'),
3163 3176 ),
3164 3177 (
3165 3178 b's',
3166 3179 b'show-set',
3167 3180 None,
3168 3181 _(b'print internal representation of result set'),
3169 3182 ),
3170 3183 (
3171 3184 b'p',
3172 3185 b'show-stage',
3173 3186 [],
3174 3187 _(b'print parsed tree at the given stage'),
3175 3188 _(b'NAME'),
3176 3189 ),
3177 3190 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3178 3191 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3179 3192 ],
3180 3193 b'REVSPEC',
3181 3194 )
3182 3195 def debugrevspec(ui, repo, expr, **opts):
3183 3196 """parse and apply a revision specification
3184 3197
3185 3198 Use -p/--show-stage option to print the parsed tree at the given stages.
3186 3199 Use -p all to print tree at every stage.
3187 3200
3188 3201 Use --no-show-revs option with -s or -p to print only the set
3189 3202 representation or the parsed tree respectively.
3190 3203
3191 3204 Use --verify-optimized to compare the optimized result with the unoptimized
3192 3205 one. Returns 1 if the optimized result differs.
3193 3206 """
3194 3207 opts = pycompat.byteskwargs(opts)
3195 3208 aliases = ui.configitems(b'revsetalias')
3196 3209 stages = [
3197 3210 (b'parsed', lambda tree: tree),
3198 3211 (
3199 3212 b'expanded',
3200 3213 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3201 3214 ),
3202 3215 (b'concatenated', revsetlang.foldconcat),
3203 3216 (b'analyzed', revsetlang.analyze),
3204 3217 (b'optimized', revsetlang.optimize),
3205 3218 ]
3206 3219 if opts[b'no_optimized']:
3207 3220 stages = stages[:-1]
3208 3221 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3209 3222 raise error.Abort(
3210 3223 _(b'cannot use --verify-optimized with --no-optimized')
3211 3224 )
3212 3225 stagenames = {n for n, f in stages}
3213 3226
3214 3227 showalways = set()
3215 3228 showchanged = set()
3216 3229 if ui.verbose and not opts[b'show_stage']:
3217 3230 # show parsed tree by --verbose (deprecated)
3218 3231 showalways.add(b'parsed')
3219 3232 showchanged.update([b'expanded', b'concatenated'])
3220 3233 if opts[b'optimize']:
3221 3234 showalways.add(b'optimized')
3222 3235 if opts[b'show_stage'] and opts[b'optimize']:
3223 3236 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3224 3237 if opts[b'show_stage'] == [b'all']:
3225 3238 showalways.update(stagenames)
3226 3239 else:
3227 3240 for n in opts[b'show_stage']:
3228 3241 if n not in stagenames:
3229 3242 raise error.Abort(_(b'invalid stage name: %s') % n)
3230 3243 showalways.update(opts[b'show_stage'])
3231 3244
3232 3245 treebystage = {}
3233 3246 printedtree = None
3234 3247 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3235 3248 for n, f in stages:
3236 3249 treebystage[n] = tree = f(tree)
3237 3250 if n in showalways or (n in showchanged and tree != printedtree):
3238 3251 if opts[b'show_stage'] or n != b'parsed':
3239 3252 ui.write(b"* %s:\n" % n)
3240 3253 ui.write(revsetlang.prettyformat(tree), b"\n")
3241 3254 printedtree = tree
3242 3255
3243 3256 if opts[b'verify_optimized']:
3244 3257 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3245 3258 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3246 3259 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3247 3260 ui.writenoi18n(
3248 3261 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3249 3262 )
3250 3263 ui.writenoi18n(
3251 3264 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3252 3265 )
3253 3266 arevs = list(arevs)
3254 3267 brevs = list(brevs)
3255 3268 if arevs == brevs:
3256 3269 return 0
3257 3270 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3258 3271 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3259 3272 sm = difflib.SequenceMatcher(None, arevs, brevs)
3260 3273 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3261 3274 if tag in ('delete', 'replace'):
3262 3275 for c in arevs[alo:ahi]:
3263 3276 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3264 3277 if tag in ('insert', 'replace'):
3265 3278 for c in brevs[blo:bhi]:
3266 3279 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3267 3280 if tag == 'equal':
3268 3281 for c in arevs[alo:ahi]:
3269 3282 ui.write(b' %d\n' % c)
3270 3283 return 1
3271 3284
3272 3285 func = revset.makematcher(tree)
3273 3286 revs = func(repo)
3274 3287 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3275 3288 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3276 3289 if not opts[b'show_revs']:
3277 3290 return
3278 3291 for c in revs:
3279 3292 ui.write(b"%d\n" % c)
3280 3293
3281 3294
3282 3295 @command(
3283 3296 b'debugserve',
3284 3297 [
3285 3298 (
3286 3299 b'',
3287 3300 b'sshstdio',
3288 3301 False,
3289 3302 _(b'run an SSH server bound to process handles'),
3290 3303 ),
3291 3304 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3292 3305 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3293 3306 ],
3294 3307 b'',
3295 3308 )
3296 3309 def debugserve(ui, repo, **opts):
3297 3310 """run a server with advanced settings
3298 3311
3299 3312 This command is similar to :hg:`serve`. It exists partially as a
3300 3313     workaround for the fact that ``hg serve --stdio`` must have specific
3301 3314 arguments for security reasons.
3302 3315 """
3303 3316 opts = pycompat.byteskwargs(opts)
3304 3317
3305 3318 if not opts[b'sshstdio']:
3306 3319 raise error.Abort(_(b'only --sshstdio is currently supported'))
3307 3320
3308 3321 logfh = None
3309 3322
3310 3323 if opts[b'logiofd'] and opts[b'logiofile']:
3311 3324 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3312 3325
3313 3326 if opts[b'logiofd']:
3314 3327 # Ideally we would be line buffered. But line buffering in binary
3315 3328 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3316 3329 # buffering could have performance impacts. But since this isn't
3317 3330 # performance critical code, it should be fine.
3318 3331 try:
3319 3332 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3320 3333 except OSError as e:
3321 3334 if e.errno != errno.ESPIPE:
3322 3335 raise
3323 3336 # can't seek a pipe, so `ab` mode fails on py3
3324 3337 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3325 3338 elif opts[b'logiofile']:
3326 3339 logfh = open(opts[b'logiofile'], b'ab', 0)
3327 3340
3328 3341 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3329 3342 s.serve_forever()
3330 3343
3331 3344
3332 3345 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3333 3346 def debugsetparents(ui, repo, rev1, rev2=None):
3334 3347 """manually set the parents of the current working directory
3335 3348
3336 3349 This is useful for writing repository conversion tools, but should
3337 3350 be used with care. For example, neither the working directory nor the
3338 3351 dirstate is updated, so file status may be incorrect after running this
3339 3352 command.
3340 3353
3341 3354 Returns 0 on success.
3342 3355 """
3343 3356
3344 3357 node1 = scmutil.revsingle(repo, rev1).node()
3345 3358 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3346 3359
3347 3360 with repo.wlock():
3348 3361 repo.setparents(node1, node2)
3349 3362
3350 3363
3351 3364 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3352 3365 def debugsidedata(ui, repo, file_, rev=None, **opts):
3353 3366 """dump the side data for a cl/manifest/file revision
3354 3367
3355 3368 Use --verbose to dump the sidedata content."""
3356 3369 opts = pycompat.byteskwargs(opts)
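    # mirror debugdata's argument handling: with -c/-m/--dir the sole
    # positional argument is the revision, otherwise both a file and a
    # revision are required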
3357 3370 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3358 3371 if rev is not None:
3359 3372             raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3360 3373         file_, rev = None, file_
3361 3374     elif rev is None:
3362 3375         raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3363 3376     r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
3364 3377 r = getattr(r, '_revlog', r)
3365 3378 try:
3366 3379 sidedata = r.sidedata(r.lookup(rev))
3367 3380 except KeyError:
3368 3381 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3369 3382 if sidedata:
3370 3383 sidedata = list(sidedata.items())
3371 3384 sidedata.sort()
3372 3385 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3373 3386 for key, value in sidedata:
3374 3387 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3375 3388 if ui.verbose:
3376 3389 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3377 3390
3378 3391
3379 3392 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3380 3393 def debugssl(ui, repo, source=None, **opts):
3381 3394 '''test a secure connection to a server
3382 3395
3383 3396 This builds the certificate chain for the server on Windows, installing the
3384 3397 missing intermediates and trusted root via Windows Update if necessary. It
3385 3398 does nothing on other platforms.
3386 3399
3387 3400 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3388 3401 that server is used. See :hg:`help urls` for more information.
3389 3402
3390 3403 If the update succeeds, retry the original operation. Otherwise, the cause
3391 3404 of the SSL error is likely another issue.
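
For example, an illustrative check against an explicit server (the URL
below is hypothetical) would be::

  hg debugssl https://hg.example.com/repo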
3392 3405 '''
3393 3406 if not pycompat.iswindows:
3394 3407 raise error.Abort(
3395 3408 _(b'certificate chain building is only possible on Windows')
3396 3409 )
3397 3410
3398 3411 if not source:
3399 3412 if not repo:
3400 3413 raise error.Abort(
3401 3414 _(
3402 3415 b"there is no Mercurial repository here, and no "
3403 3416 b"server specified"
3404 3417 )
3405 3418 )
3406 3419 source = b"default"
3407 3420
3408 3421 source, branches = hg.parseurl(ui.expandpath(source))
3409 3422 url = util.url(source)
3410 3423
3411 3424 defaultport = {b'https': 443, b'ssh': 22}
3412 3425 if url.scheme in defaultport:
3413 3426 try:
3414 3427 addr = (url.host, int(url.port or defaultport[url.scheme]))
3415 3428 except ValueError:
3416 3429 raise error.Abort(_(b"malformed port number in URL"))
3417 3430 else:
3418 3431 raise error.Abort(_(b"only https and ssh connections are supported"))
3419 3432
3420 3433 from . import win32
3421 3434
3422 3435 s = ssl.wrap_socket(
3423 3436 socket.socket(),
3424 3437 ssl_version=ssl.PROTOCOL_TLS,
3425 3438 cert_reqs=ssl.CERT_NONE,
3426 3439 ca_certs=None,
3427 3440 )
3428 3441
3429 3442 try:
3430 3443 s.connect(addr)
3431 3444 cert = s.getpeercert(True)
3432 3445
3433 3446 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3434 3447
3435 3448 complete = win32.checkcertificatechain(cert, build=False)
3436 3449
3437 3450 if not complete:
3438 3451 ui.status(_(b'certificate chain is incomplete, updating... '))
3439 3452
3440 3453 if not win32.checkcertificatechain(cert):
3441 3454 ui.status(_(b'failed.\n'))
3442 3455 else:
3443 3456 ui.status(_(b'done.\n'))
3444 3457 else:
3445 3458 ui.status(_(b'full certificate chain is available\n'))
3446 3459 finally:
3447 3460 s.close()
3448 3461
3449 3462
3450 3463 @command(
3451 3464 b"debugbackupbundle",
3452 3465 [
3453 3466 (
3454 3467 b"",
3455 3468 b"recover",
3456 3469 b"",
3457 3470 b"brings the specified changeset back into the repository",
3458 3471 )
3459 3472 ]
3460 3473 + cmdutil.logopts,
3461 3474 _(b"hg debugbackupbundle [--recover HASH]"),
3462 3475 )
3463 3476 def debugbackupbundle(ui, repo, *pats, **opts):
3464 3477 """lists the changesets available in backup bundles
3465 3478
3466 3479 Without any arguments, this command prints a list of the changesets in each
3467 3480 backup bundle.
3468 3481
3469 3482 --recover takes a changeset hash and unbundles the first bundle that
3470 3483 contains that hash, which puts that changeset back in your repository.
3471 3484
3472 3485 --verbose will print the entire commit message and the bundle path for that
3473 3486 backup.
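
For example, an illustrative recovery (the hash below is a placeholder)
would be::

  hg debugbackupbundle --recover 1234567890ab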
3474 3487 """
3475 3488 backups = list(
3476 3489 filter(
3477 3490 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3478 3491 )
3479 3492 )
3480 3493 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3481 3494
3482 3495 opts = pycompat.byteskwargs(opts)
3483 3496 opts[b"bundle"] = b""
3484 3497 opts[b"force"] = None
3485 3498 limit = logcmdutil.getlimit(opts)
3486 3499
3487 3500 def display(other, chlist, displayer):
3488 3501 if opts.get(b"newest_first"):
3489 3502 chlist.reverse()
3490 3503 count = 0
3491 3504 for n in chlist:
3492 3505 if limit is not None and count >= limit:
3493 3506 break
3494 3507 parents = [True for p in other.changelog.parents(n) if p != nullid]
3495 3508 if opts.get(b"no_merges") and len(parents) == 2:
3496 3509 continue
3497 3510 count += 1
3498 3511 displayer.show(other[n])
3499 3512
3500 3513 recovernode = opts.get(b"recover")
3501 3514 if recovernode:
3502 3515 if scmutil.isrevsymbol(repo, recovernode):
3503 3516 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3504 3517 return
3505 3518 elif backups:
3506 3519 msg = _(
3507 3520 b"Recover changesets using: hg debugbackupbundle --recover "
3508 3521 b"<changeset hash>\n\nAvailable backup changesets:"
3509 3522 )
3510 3523 ui.status(msg, label=b"status.removed")
3511 3524 else:
3512 3525 ui.status(_(b"no backup changesets found\n"))
3513 3526 return
3514 3527
3515 3528 for backup in backups:
3516 3529 # Much of this is copied from the hg incoming logic
3517 3530 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3518 3531 source, branches = hg.parseurl(source, opts.get(b"branch"))
3519 3532 try:
3520 3533 other = hg.peer(repo, opts, source)
3521 3534 except error.LookupError as ex:
3522 3535 msg = _(b"\nwarning: unable to open bundle %s") % source
3523 3536 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3524 3537 ui.warn(msg, hint=hint)
3525 3538 continue
3526 3539 revs, checkout = hg.addbranchrevs(
3527 3540 repo, other, branches, opts.get(b"rev")
3528 3541 )
3529 3542
3530 3543 if revs:
3531 3544 revs = [other.lookup(rev) for rev in revs]
3532 3545
3533 3546 quiet = ui.quiet
3534 3547 try:
3535 3548 ui.quiet = True
3536 3549 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3537 3550 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3538 3551 )
3539 3552 except error.LookupError:
3540 3553 continue
3541 3554 finally:
3542 3555 ui.quiet = quiet
3543 3556
3544 3557 try:
3545 3558 if not chlist:
3546 3559 continue
3547 3560 if recovernode:
3548 3561 with repo.lock(), repo.transaction(b"unbundle") as tr:
3549 3562 if scmutil.isrevsymbol(other, recovernode):
3550 3563 ui.status(_(b"Unbundling %s\n") % (recovernode))
3551 3564 f = hg.openpath(ui, source)
3552 3565 gen = exchange.readbundle(ui, f, source)
3553 3566 if isinstance(gen, bundle2.unbundle20):
3554 3567 bundle2.applybundle(
3555 3568 repo,
3556 3569 gen,
3557 3570 tr,
3558 3571 source=b"unbundle",
3559 3572 url=b"bundle:" + source,
3560 3573 )
3561 3574 else:
3562 3575 gen.apply(repo, b"unbundle", b"bundle:" + source)
3563 3576 break
3564 3577 else:
3565 3578 backupdate = encoding.strtolocal(
3566 3579 time.strftime(
3567 3580 "%a %H:%M, %Y-%m-%d",
3568 3581 time.localtime(os.path.getmtime(source)),
3569 3582 )
3570 3583 )
3571 3584 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3572 3585 if ui.verbose:
3573 3586 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3574 3587 else:
3575 3588 opts[
3576 3589 b"template"
3577 3590 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3578 3591 displayer = logcmdutil.changesetdisplayer(
3579 3592 ui, other, opts, False
3580 3593 )
3581 3594 display(other, chlist, displayer)
3582 3595 displayer.close()
3583 3596 finally:
3584 3597 cleanupfn()
3585 3598
3586 3599
3587 3600 @command(
3588 3601 b'debugsub',
3589 3602 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3590 3603 _(b'[-r REV] [REV]'),
3591 3604 )
3592 3605 def debugsub(ui, repo, rev=None):
3593 3606 ctx = scmutil.revsingle(repo, rev, None)
3594 3607 for k, v in sorted(ctx.substate.items()):
3595 3608 ui.writenoi18n(b'path %s\n' % k)
3596 3609 ui.writenoi18n(b' source %s\n' % v[0])
3597 3610 ui.writenoi18n(b' revision %s\n' % v[1])
3598 3611
3599 3612
3600 3613 @command(
3601 3614 b'debugsuccessorssets',
3602 3615 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3603 3616 _(b'[REV]'),
3604 3617 )
3605 3618 def debugsuccessorssets(ui, repo, *revs, **opts):
3606 3619 """show set of successors for revision
3607 3620
3608 3621 A successors set of changeset A is a consistent group of revisions that
3609 3622 succeed A. It contains non-obsolete changesets only unless closests
3610 3623 successors set is set.
3611 3624
3612 3625 In most cases a changeset A has a single successors set containing a single
3613 3626 successor (changeset A replaced by A').
3614 3627
3615 3628 A changeset that is made obsolete with no successors is called "pruned".
3616 3629 Such changesets have no successors sets at all.
3617 3630
3618 3631 A changeset that has been "split" will have a successors set containing
3619 3632 more than one successor.
3620 3633
3621 3634 A changeset that has been rewritten in multiple different ways is called
3622 3635 "divergent". Such changesets have multiple successor sets (each of which
3623 3636 may also be split, i.e. have multiple successors).
3624 3637
3625 3638 Results are displayed as follows::
3626 3639
3627 3640 <rev1>
3628 3641 <successors-1A>
3629 3642 <rev2>
3630 3643 <successors-2A>
3631 3644 <successors-2B1> <successors-2B2> <successors-2B3>
3632 3645
3633 3646 Here rev2 has two possible (i.e. divergent) successors sets. The first
3634 3647 holds one element, whereas the second holds three (i.e. the changeset has
3635 3648 been split).
3636 3649 """
3637 3650 # passed to successorssets caching computation from one call to another
3638 3651 cache = {}
3639 3652 ctx2str = bytes
3640 3653 node2str = short
3641 3654 for rev in scmutil.revrange(repo, revs):
3642 3655 ctx = repo[rev]
3643 3656 ui.write(b'%s\n' % ctx2str(ctx))
3644 3657 for succsset in obsutil.successorssets(
3645 3658 repo, ctx.node(), closest=opts['closest'], cache=cache
3646 3659 ):
3647 3660 if succsset:
3648 3661 ui.write(b' ')
3649 3662 ui.write(node2str(succsset[0]))
3650 3663 for node in succsset[1:]:
3651 3664 ui.write(b' ')
3652 3665 ui.write(node2str(node))
3653 3666 ui.write(b'\n')
3654 3667
3655 3668
3656 3669 @command(b'debugtagscache', [])
3657 3670 def debugtagscache(ui, repo):
3658 3671 """display the contents of .hg/cache/hgtagsfnodes1"""
3659 3672 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3660 3673 for r in repo:
3661 3674 node = repo[r].node()
3662 3675 tagsnode = cache.getfnode(node, computemissing=False)
3663 3676 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3664 3677 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3665 3678
3666 3679
3667 3680 @command(
3668 3681 b'debugtemplate',
3669 3682 [
3670 3683 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3671 3684 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3672 3685 ],
3673 3686 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3674 3687 optionalrepo=True,
3675 3688 )
3676 3689 def debugtemplate(ui, repo, tmpl, **opts):
3677 3690 """parse and apply a template
3678 3691
3679 3692 If -r/--rev is given, the template is processed as a log template and
3680 3693 applied to the given changesets. Otherwise, it is processed as a generic
3681 3694 template.
3682 3695
3683 3696 Use --verbose to print the parsed tree.
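
For example, an illustrative invocation defining an extra keyword (the
keyword name is arbitrary) would be::

  hg debugtemplate -r . -D greeting=hello '{greeting}: {node|short}\n'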
3684 3697 """
3685 3698 revs = None
3686 3699 if opts['rev']:
3687 3700 if repo is None:
3688 3701 raise error.RepoError(
3689 3702 _(b'there is no Mercurial repository here (.hg not found)')
3690 3703 )
3691 3704 revs = scmutil.revrange(repo, opts['rev'])
3692 3705
3693 3706 props = {}
3694 3707 for d in opts['define']:
3695 3708 try:
3696 3709 k, v = (e.strip() for e in d.split(b'=', 1))
3697 3710 if not k or k == b'ui':
3698 3711 raise ValueError
3699 3712 props[k] = v
3700 3713 except ValueError:
3701 3714 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3702 3715
3703 3716 if ui.verbose:
3704 3717 aliases = ui.configitems(b'templatealias')
3705 3718 tree = templater.parse(tmpl)
3706 3719 ui.note(templater.prettyformat(tree), b'\n')
3707 3720 newtree = templater.expandaliases(tree, aliases)
3708 3721 if newtree != tree:
3709 3722 ui.notenoi18n(
3710 3723 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3711 3724 )
3712 3725
3713 3726 if revs is None:
3714 3727 tres = formatter.templateresources(ui, repo)
3715 3728 t = formatter.maketemplater(ui, tmpl, resources=tres)
3716 3729 if ui.verbose:
3717 3730 kwds, funcs = t.symbolsuseddefault()
3718 3731 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3719 3732 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3720 3733 ui.write(t.renderdefault(props))
3721 3734 else:
3722 3735 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3723 3736 if ui.verbose:
3724 3737 kwds, funcs = displayer.t.symbolsuseddefault()
3725 3738 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3726 3739 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3727 3740 for r in revs:
3728 3741 displayer.show(repo[r], **pycompat.strkwargs(props))
3729 3742 displayer.close()
3730 3743
3731 3744
3732 3745 @command(
3733 3746 b'debuguigetpass',
3734 3747 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3735 3748 _(b'[-p TEXT]'),
3736 3749 norepo=True,
3737 3750 )
3738 3751 def debuguigetpass(ui, prompt=b''):
3739 3752 """show prompt to type password"""
3740 3753 r = ui.getpass(prompt)
3741 3754 ui.writenoi18n(b'response: %s\n' % r)
3742 3755
3743 3756
3744 3757 @command(
3745 3758 b'debuguiprompt',
3746 3759 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3747 3760 _(b'[-p TEXT]'),
3748 3761 norepo=True,
3749 3762 )
3750 3763 def debuguiprompt(ui, prompt=b''):
3751 3764 """show plain prompt"""
3752 3765 r = ui.prompt(prompt)
3753 3766 ui.writenoi18n(b'response: %s\n' % r)
3754 3767
3755 3768
3756 3769 @command(b'debugupdatecaches', [])
3757 3770 def debugupdatecaches(ui, repo, *pats, **opts):
3758 3771 """warm all known caches in the repository"""
3759 3772 with repo.wlock(), repo.lock():
3760 3773 repo.updatecaches(full=True)
3761 3774
3762 3775
3763 3776 @command(
3764 3777 b'debugupgraderepo',
3765 3778 [
3766 3779 (
3767 3780 b'o',
3768 3781 b'optimize',
3769 3782 [],
3770 3783 _(b'extra optimization to perform'),
3771 3784 _(b'NAME'),
3772 3785 ),
3773 3786 (b'', b'run', False, _(b'performs an upgrade')),
3774 3787 (b'', b'backup', True, _(b'keep the old repository content around')),
3775 3788 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3776 3789 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3777 3790 ],
3778 3791 )
3779 3792 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3780 3793 """upgrade a repository to use different features
3781 3794
3782 3795 If no arguments are specified, the repository is evaluated for upgrade
3783 3796 and a list of problems and potential optimizations is printed.
3784 3797
3785 3798 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3786 3799 can be influenced via additional arguments. More details will be provided
3787 3800 by the command output when run without ``--run``.
3788 3801
3789 3802 During the upgrade, the repository will be locked and no writes will be
3790 3803 allowed.
3791 3804
3792 3805 At the end of the upgrade, the repository may not be readable while new
3793 3806 repository data is swapped in. This window will be as long as it takes to
3794 3807 rename some directories inside the ``.hg`` directory. On most machines, this
3795 3808 should complete almost instantaneously and the chances of a consumer being
3796 3809 unable to access the repository should be low.
3797 3810
3798 3811 By default, all revlogs will be upgraded. You can restrict this using flags
3799 3812 such as `--manifest` (see the example after this list):
3800 3813
3801 3814 * `--manifest`: only optimize the manifest
3802 3815 * `--no-manifest`: optimize all revlogs but the manifest
3803 3816 * `--changelog`: optimize the changelog only
3804 3817 * `--no-changelog --no-manifest`: optimize filelogs only
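
For example, an illustrative run that optimizes only the filelogs would
be::

  hg debugupgraderepo --run --no-changelog --no-manifest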
3805 3818 """
3806 3819 return upgrade.upgraderepo(
3807 3820 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3808 3821 )
3809 3822
3810 3823
3811 3824 @command(
3812 3825 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3813 3826 )
3814 3827 def debugwalk(ui, repo, *pats, **opts):
3815 3828 """show how files match on given patterns"""
3816 3829 opts = pycompat.byteskwargs(opts)
3817 3830 m = scmutil.match(repo[None], pats, opts)
3818 3831 if ui.verbose:
3819 3832 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3820 3833 items = list(repo[None].walk(m))
3821 3834 if not items:
3822 3835 return
3823 3836 f = lambda fn: fn
3824 3837 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3825 3838 f = lambda fn: util.normpath(fn)
3826 3839 fmt = b'f %%-%ds %%-%ds %%s' % (
3827 3840 max([len(abs) for abs in items]),
3828 3841 max([len(repo.pathto(abs)) for abs in items]),
3829 3842 )
3830 3843 for abs in items:
3831 3844 line = fmt % (
3832 3845 abs,
3833 3846 f(repo.pathto(abs)),
3834 3847 m.exact(abs) and b'exact' or b'',
3835 3848 )
3836 3849 ui.write(b"%s\n" % line.rstrip())
3837 3850
3838 3851
3839 3852 @command(b'debugwhyunstable', [], _(b'REV'))
3840 3853 def debugwhyunstable(ui, repo, rev):
3841 3854 """explain instabilities of a changeset"""
3842 3855 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3843 3856 dnodes = b''
3844 3857 if entry.get(b'divergentnodes'):
3845 3858 dnodes = (
3846 3859 b' '.join(
3847 3860 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3848 3861 for ctx in entry[b'divergentnodes']
3849 3862 )
3850 3863 + b' '
3851 3864 )
3852 3865 ui.write(
3853 3866 b'%s: %s%s %s\n'
3854 3867 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3855 3868 )
3856 3869
3857 3870
3858 3871 @command(
3859 3872 b'debugwireargs',
3860 3873 [
3861 3874 (b'', b'three', b'', b'three'),
3862 3875 (b'', b'four', b'', b'four'),
3863 3876 (b'', b'five', b'', b'five'),
3864 3877 ]
3865 3878 + cmdutil.remoteopts,
3866 3879 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3867 3880 norepo=True,
3868 3881 )
3869 3882 def debugwireargs(ui, repopath, *vals, **opts):
3870 3883 opts = pycompat.byteskwargs(opts)
3871 3884 repo = hg.peer(ui, opts, repopath)
3872 3885 for opt in cmdutil.remoteopts:
3873 3886 del opts[opt[1]]
3874 3887 args = {}
3875 3888 for k, v in pycompat.iteritems(opts):
3876 3889 if v:
3877 3890 args[k] = v
3878 3891 args = pycompat.strkwargs(args)
3879 3892 # run twice to check that we don't mess up the stream for the next command
3880 3893 res1 = repo.debugwireargs(*vals, **args)
3881 3894 res2 = repo.debugwireargs(*vals, **args)
3882 3895 ui.write(b"%s\n" % res1)
3883 3896 if res1 != res2:
3884 3897 ui.warn(b"%s\n" % res2)
3885 3898
3886 3899
3887 3900 def _parsewirelangblocks(fh):
3888 3901 activeaction = None
3889 3902 blocklines = []
3890 3903 lastindent = 0
3891 3904
3892 3905 for line in fh:
3893 3906 line = line.rstrip()
3894 3907 if not line:
3895 3908 continue
3896 3909
3897 3910 if line.startswith(b'#'):
3898 3911 continue
3899 3912
3900 3913 if not line.startswith(b' '):
3901 3914 # New block. Flush previous one.
3902 3915 if activeaction:
3903 3916 yield activeaction, blocklines
3904 3917
3905 3918 activeaction = line
3906 3919 blocklines = []
3907 3920 lastindent = 0
3908 3921 continue
3909 3922
3910 3923 # Else we start with an indent.
3911 3924
3912 3925 if not activeaction:
3913 3926 raise error.Abort(_(b'indented line outside of block'))
3914 3927
3915 3928 indent = len(line) - len(line.lstrip())
3916 3929
3917 3930 # If this line is indented more than the last line, concatenate it.
3918 3931 if indent > lastindent and blocklines:
3919 3932 blocklines[-1] += line.lstrip()
3920 3933 else:
3921 3934 blocklines.append(line)
3922 3935 lastindent = indent
3923 3936
3924 3937 # Flush last block.
3925 3938 if activeaction:
3926 3939 yield activeaction, blocklines
3927 3940
3928 3941
3929 3942 @command(
3930 3943 b'debugwireproto',
3931 3944 [
3932 3945 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3933 3946 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3934 3947 (
3935 3948 b'',
3936 3949 b'noreadstderr',
3937 3950 False,
3938 3951 _(b'do not read from stderr of the remote'),
3939 3952 ),
3940 3953 (
3941 3954 b'',
3942 3955 b'nologhandshake',
3943 3956 False,
3944 3957 _(b'do not log I/O related to the peer handshake'),
3945 3958 ),
3946 3959 ]
3947 3960 + cmdutil.remoteopts,
3948 3961 _(b'[PATH]'),
3949 3962 optionalrepo=True,
3950 3963 )
3951 3964 def debugwireproto(ui, repo, path=None, **opts):
3952 3965 """send wire protocol commands to a server
3953 3966
3954 3967 This command can be used to issue wire protocol commands to remote
3955 3968 peers and to debug the raw data being exchanged.
3956 3969
3957 3970 ``--localssh`` will start an SSH server against the current repository
3958 3971 and connect to that. By default, the connection will perform a handshake
3959 3972 and establish an appropriate peer instance.
3960 3973
3961 3974 ``--peer`` can be used to bypass the handshake protocol and construct a
3962 3975 peer instance using the specified class type. Valid values are ``raw``,
3963 3976 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3964 3977 raw data payloads and don't support higher-level command actions.
3965 3978
3966 3979 ``--noreadstderr`` can be used to disable automatic reading from stderr
3967 3980 of the peer (for SSH connections only). Disabling automatic reading of
3968 3981 stderr is useful for making output more deterministic.
3969 3982
3970 3983 Commands are issued via a mini language which is specified via stdin.
3971 3984 The language consists of individual actions to perform. An action is
3972 3985 defined by a block. A block is defined as a line with no leading
3973 3986 space followed by 0 or more lines with leading space. Blocks are
3974 3987 effectively a high-level command with additional metadata.
3975 3988
3976 3989 Lines beginning with ``#`` are ignored.
3977 3990
3978 3991 The following sections denote available actions.
3979 3992
3980 3993 raw
3981 3994 ---
3982 3995
3983 3996 Send raw data to the server.
3984 3997
3985 3998 The block payload contains the raw data to send as one atomic send
3986 3999 operation. The data may not actually be delivered in a single system
3987 4000 call: it depends on the abilities of the transport being used.
3988 4001
3989 4002 Each line in the block is de-indented and concatenated. Then, that
3990 4003 value is evaluated as a Python b'' literal. This allows the use of
3991 4004 backslash escaping, etc.
3992 4005
3993 4006 raw+
3994 4007 ----
3995 4008
3996 4009 Behaves like ``raw`` except flushes output afterwards.
3997 4010
3998 4011 command <X>
3999 4012 -----------
4000 4013
4001 4014 Send a request to run a named command, whose name follows the ``command``
4002 4015 string.
4003 4016
4004 4017 Arguments to the command are defined as lines in this block. The format of
4005 4018 each line is ``<key> <value>``. e.g.::
4006 4019
4007 4020 command listkeys
4008 4021 namespace bookmarks
4009 4022
4010 4023 If the value begins with ``eval:``, it will be interpreted as a Python
4011 4024 literal expression. Otherwise values are interpreted as Python b'' literals.
4012 4025 This allows sending complex types and encoding special byte sequences via
4013 4026 backslash escaping.
4014 4027
4015 4028 The following arguments have special meaning:
4016 4029
4017 4030 ``PUSHFILE``
4018 4031 When defined, the *push* mechanism of the peer will be used instead
4019 4032 of the static request-response mechanism and the content of the
4020 4033 file specified in the value of this argument will be sent as the
4021 4034 command payload.
4022 4035
4023 4036 This can be used to submit a local bundle file to the remote.
4024 4037
4025 4038 batchbegin
4026 4039 ----------
4027 4040
4028 4041 Instruct the peer to begin a batched send.
4029 4042
4030 4043 All ``command`` blocks are queued for execution until the next
4031 4044 ``batchsubmit`` block.
4032 4045
4033 4046 batchsubmit
4034 4047 -----------
4035 4048
4036 4049 Submit previously queued ``command`` blocks as a batch request.
4037 4050
4038 4051 This action MUST be paired with a ``batchbegin`` action.
4039 4052
4040 4053 httprequest <method> <path>
4041 4054 ---------------------------
4042 4055
4043 4056 (HTTP peer only)
4044 4057
4045 4058 Send an HTTP request to the peer.
4046 4059
4047 4060 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4048 4061
4049 4062 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4050 4063 headers to add to the request. e.g. ``Accept: foo``.
4051 4064
4052 4065 The following arguments are special:
4053 4066
4054 4067 ``BODYFILE``
4055 4068 The content of the file defined as the value to this argument will be
4056 4069 transferred verbatim as the HTTP request body.
4057 4070
4058 4071 ``frame <type> <flags> <payload>``
4059 4072 Send a unified protocol frame as part of the request body.
4060 4073
4061 4074 All frames will be collected and sent as the body to the HTTP
4062 4075 request.
4063 4076
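For example, an illustrative block sending a GET request with one extra
header would be::

  httprequest GET api/
      user-agent: test
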
4064 4077 close
4065 4078 -----
4066 4079
4067 4080 Close the connection to the server.
4068 4081
4069 4082 flush
4070 4083 -----
4071 4084
4072 4085 Flush data written to the server.
4073 4086
4074 4087 readavailable
4075 4088 -------------
4076 4089
4077 4090 Close the write end of the connection and read all available data from
4078 4091 the server.
4079 4092
4080 4093 If the connection to the server encompasses multiple pipes, we poll both
4081 4094 pipes and read available data.
4082 4095
4083 4096 readline
4084 4097 --------
4085 4098
4086 4099 Read a line of output from the server. If there are multiple output
4087 4100 pipes, reads only the main pipe.
4088 4101
4089 4102 ereadline
4090 4103 ---------
4091 4104
4092 4105 Like ``readline``, but read from the stderr pipe, if available.
4093 4106
4094 4107 read <X>
4095 4108 --------
4096 4109
4097 4110 ``read()`` X bytes from the server's main output pipe.
4098 4111
4099 4112 eread <X>
4100 4113 ---------
4101 4114
4102 4115 ``read()`` X bytes from the server's stderr pipe, if available.
4103 4116
4104 4117 Specifying Unified Frame-Based Protocol Frames
4105 4118 ----------------------------------------------
4106 4119
4107 4120 It is possible to emit a *Unified Frame-Based Protocol* by using special
4108 4121 syntax.
4109 4122
4110 4123 A frame is composed as a type, flags, and payload. These can be parsed
4111 4124 from a string of the form:
4112 4125
4113 4126 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4114 4127
4115 4128 ``request-id`` and ``stream-id`` are integers defining the request and
4116 4129 stream identifiers.
4117 4130
4118 4131 ``type`` can be an integer value for the frame type or the string name
4119 4132 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4120 4133 ``command-name``.
4121 4134
4122 4135 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4123 4136 components. Each component (and there can be just one) can be an integer
4124 4137 or a flag name for stream flags or frame flags, respectively. Values are
4125 4138 resolved to integers and then bitwise OR'd together.
4126 4139
4127 4140 ``payload`` represents the raw frame payload. If it begins with
4128 4141 ``cbor:``, the following string is evaluated as Python code and the
4129 4142 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4130 4143 as a Python byte string literal.
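
For example, an illustrative frame specification (such as might be used
with the ``frame`` argument above) could be::

  1 1 stream-begin command-request new cbor:{b'name': b'heads'}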
4131 4144 """
4132 4145 opts = pycompat.byteskwargs(opts)
4133 4146
4134 4147 if opts[b'localssh'] and not repo:
4135 4148 raise error.Abort(_(b'--localssh requires a repository'))
4136 4149
4137 4150 if opts[b'peer'] and opts[b'peer'] not in (
4138 4151 b'raw',
4139 4152 b'http2',
4140 4153 b'ssh1',
4141 4154 b'ssh2',
4142 4155 ):
4143 4156 raise error.Abort(
4144 4157 _(b'invalid value for --peer'),
4145 4158 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4146 4159 )
4147 4160
4148 4161 if path and opts[b'localssh']:
4149 4162 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4150 4163
4151 4164 if ui.interactive():
4152 4165 ui.write(_(b'(waiting for commands on stdin)\n'))
4153 4166
4154 4167 blocks = list(_parsewirelangblocks(ui.fin))
4155 4168
4156 4169 proc = None
4157 4170 stdin = None
4158 4171 stdout = None
4159 4172 stderr = None
4160 4173 opener = None
4161 4174
4162 4175 if opts[b'localssh']:
4163 4176 # We start the SSH server in its own process so there is process
4164 4177 # separation. This prevents a whole class of potential bugs around
4165 4178 # shared state from interfering with server operation.
4166 4179 args = procutil.hgcmd() + [
4167 4180 b'-R',
4168 4181 repo.root,
4169 4182 b'debugserve',
4170 4183 b'--sshstdio',
4171 4184 ]
4172 4185 proc = subprocess.Popen(
4173 4186 pycompat.rapply(procutil.tonativestr, args),
4174 4187 stdin=subprocess.PIPE,
4175 4188 stdout=subprocess.PIPE,
4176 4189 stderr=subprocess.PIPE,
4177 4190 bufsize=0,
4178 4191 )
4179 4192
4180 4193 stdin = proc.stdin
4181 4194 stdout = proc.stdout
4182 4195 stderr = proc.stderr
4183 4196
4184 4197 # We turn the pipes into observers so we can log I/O.
4185 4198 if ui.verbose or opts[b'peer'] == b'raw':
4186 4199 stdin = util.makeloggingfileobject(
4187 4200 ui, proc.stdin, b'i', logdata=True
4188 4201 )
4189 4202 stdout = util.makeloggingfileobject(
4190 4203 ui, proc.stdout, b'o', logdata=True
4191 4204 )
4192 4205 stderr = util.makeloggingfileobject(
4193 4206 ui, proc.stderr, b'e', logdata=True
4194 4207 )
4195 4208
4196 4209 # --localssh also implies the peer connection settings.
4197 4210
4198 4211 url = b'ssh://localserver'
4199 4212 autoreadstderr = not opts[b'noreadstderr']
4200 4213
4201 4214 if opts[b'peer'] == b'ssh1':
4202 4215 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4203 4216 peer = sshpeer.sshv1peer(
4204 4217 ui,
4205 4218 url,
4206 4219 proc,
4207 4220 stdin,
4208 4221 stdout,
4209 4222 stderr,
4210 4223 None,
4211 4224 autoreadstderr=autoreadstderr,
4212 4225 )
4213 4226 elif opts[b'peer'] == b'ssh2':
4214 4227 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4215 4228 peer = sshpeer.sshv2peer(
4216 4229 ui,
4217 4230 url,
4218 4231 proc,
4219 4232 stdin,
4220 4233 stdout,
4221 4234 stderr,
4222 4235 None,
4223 4236 autoreadstderr=autoreadstderr,
4224 4237 )
4225 4238 elif opts[b'peer'] == b'raw':
4226 4239 ui.write(_(b'using raw connection to peer\n'))
4227 4240 peer = None
4228 4241 else:
4229 4242 ui.write(_(b'creating ssh peer from handshake results\n'))
4230 4243 peer = sshpeer.makepeer(
4231 4244 ui,
4232 4245 url,
4233 4246 proc,
4234 4247 stdin,
4235 4248 stdout,
4236 4249 stderr,
4237 4250 autoreadstderr=autoreadstderr,
4238 4251 )
4239 4252
4240 4253 elif path:
4241 4254 # We bypass hg.peer() so we can proxy the sockets.
4242 4255 # TODO consider not doing this because we skip
4243 4256 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4244 4257 u = util.url(path)
4245 4258 if u.scheme != b'http':
4246 4259 raise error.Abort(_(b'only http:// paths are currently supported'))
4247 4260
4248 4261 url, authinfo = u.authinfo()
4249 4262 openerargs = {
4250 4263 'useragent': b'Mercurial debugwireproto',
4251 4264 }
4252 4265
4253 4266 # Turn pipes/sockets into observers so we can log I/O.
4254 4267 if ui.verbose:
4255 4268 openerargs.update(
4256 4269 {
4257 4270 'loggingfh': ui,
4258 4271 'loggingname': b's',
4259 4272 'loggingopts': {'logdata': True, 'logdataapis': False,},
4260 4273 }
4261 4274 )
4262 4275
4263 4276 if ui.debugflag:
4264 4277 openerargs['loggingopts']['logdataapis'] = True
4265 4278
4266 4279 # Don't send default headers when in raw mode. This allows us to
4267 4280 # bypass most of the behavior of our URL handling code so we can
4268 4281 # have near complete control over what's sent on the wire.
4269 4282 if opts[b'peer'] == b'raw':
4270 4283 openerargs['sendaccept'] = False
4271 4284
4272 4285 opener = urlmod.opener(ui, authinfo, **openerargs)
4273 4286
4274 4287 if opts[b'peer'] == b'http2':
4275 4288 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4276 4289 # We go through makepeer() because we need an API descriptor for
4277 4290 # the peer instance to be useful.
4278 4291 with ui.configoverride(
4279 4292 {(b'experimental', b'httppeer.advertise-v2'): True}
4280 4293 ):
4281 4294 if opts[b'nologhandshake']:
4282 4295 ui.pushbuffer()
4283 4296
4284 4297 peer = httppeer.makepeer(ui, path, opener=opener)
4285 4298
4286 4299 if opts[b'nologhandshake']:
4287 4300 ui.popbuffer()
4288 4301
4289 4302 if not isinstance(peer, httppeer.httpv2peer):
4290 4303 raise error.Abort(
4291 4304 _(
4292 4305 b'could not instantiate HTTP peer for '
4293 4306 b'wire protocol version 2'
4294 4307 ),
4295 4308 hint=_(
4296 4309 b'the server may not have the feature '
4297 4310 b'enabled or is not allowing this '
4298 4311 b'client version'
4299 4312 ),
4300 4313 )
4301 4314
4302 4315 elif opts[b'peer'] == b'raw':
4303 4316 ui.write(_(b'using raw connection to peer\n'))
4304 4317 peer = None
4305 4318 elif opts[b'peer']:
4306 4319 raise error.Abort(
4307 4320 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4308 4321 )
4309 4322 else:
4310 4323 peer = httppeer.makepeer(ui, path, opener=opener)
4311 4324
4312 4325 # We /could/ populate stdin/stdout with sock.makefile()...
4313 4326 else:
4314 4327 raise error.Abort(_(b'unsupported connection configuration'))
4315 4328
4316 4329 batchedcommands = None
4317 4330
4318 4331 # Now perform actions based on the parsed wire language instructions.
4319 4332 for action, lines in blocks:
4320 4333 if action in (b'raw', b'raw+'):
4321 4334 if not stdin:
4322 4335 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4323 4336
4324 4337 # Concatenate the data together.
4325 4338 data = b''.join(l.lstrip() for l in lines)
4326 4339 data = stringutil.unescapestr(data)
4327 4340 stdin.write(data)
4328 4341
4329 4342 if action == b'raw+':
4330 4343 stdin.flush()
4331 4344 elif action == b'flush':
4332 4345 if not stdin:
4333 4346 raise error.Abort(_(b'cannot call flush on this peer'))
4334 4347 stdin.flush()
4335 4348 elif action.startswith(b'command'):
4336 4349 if not peer:
4337 4350 raise error.Abort(
4338 4351 _(
4339 4352 b'cannot send commands unless peer instance '
4340 4353 b'is available'
4341 4354 )
4342 4355 )
4343 4356
4344 4357 command = action.split(b' ', 1)[1]
4345 4358
4346 4359 args = {}
4347 4360 for line in lines:
4348 4361 # We need to allow empty values.
4349 4362 fields = line.lstrip().split(b' ', 1)
4350 4363 if len(fields) == 1:
4351 4364 key = fields[0]
4352 4365 value = b''
4353 4366 else:
4354 4367 key, value = fields
4355 4368
4356 4369 if value.startswith(b'eval:'):
4357 4370 value = stringutil.evalpythonliteral(value[5:])
4358 4371 else:
4359 4372 value = stringutil.unescapestr(value)
4360 4373
4361 4374 args[key] = value
4362 4375
4363 4376 if batchedcommands is not None:
4364 4377 batchedcommands.append((command, args))
4365 4378 continue
4366 4379
4367 4380 ui.status(_(b'sending %s command\n') % command)
4368 4381
4369 4382 if b'PUSHFILE' in args:
4370 4383 with open(args[b'PUSHFILE'], 'rb') as fh:
4371 4384 del args[b'PUSHFILE']
4372 4385 res, output = peer._callpush(
4373 4386 command, fh, **pycompat.strkwargs(args)
4374 4387 )
4375 4388 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4376 4389 ui.status(
4377 4390 _(b'remote output: %s\n') % stringutil.escapestr(output)
4378 4391 )
4379 4392 else:
4380 4393 with peer.commandexecutor() as e:
4381 4394 res = e.callcommand(command, args).result()
4382 4395
4383 4396 if isinstance(res, wireprotov2peer.commandresponse):
4384 4397 val = res.objects()
4385 4398 ui.status(
4386 4399 _(b'response: %s\n')
4387 4400 % stringutil.pprint(val, bprefix=True, indent=2)
4388 4401 )
4389 4402 else:
4390 4403 ui.status(
4391 4404 _(b'response: %s\n')
4392 4405 % stringutil.pprint(res, bprefix=True, indent=2)
4393 4406 )
4394 4407
4395 4408 elif action == b'batchbegin':
4396 4409 if batchedcommands is not None:
4397 4410 raise error.Abort(_(b'nested batchbegin not allowed'))
4398 4411
4399 4412 batchedcommands = []
4400 4413 elif action == b'batchsubmit':
4401 4414 # There is a batching API we could go through. But it would be
4402 4415 # difficult to normalize requests into function calls. It is easier
4403 4416 # to bypass this layer and normalize to commands + args.
4404 4417 ui.status(
4405 4418 _(b'sending batch with %d sub-commands\n')
4406 4419 % len(batchedcommands)
4407 4420 )
4408 4421 assert peer is not None
4409 4422 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4410 4423 ui.status(
4411 4424 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4412 4425 )
4413 4426
4414 4427 batchedcommands = None
4415 4428
4416 4429 elif action.startswith(b'httprequest '):
4417 4430 if not opener:
4418 4431 raise error.Abort(
4419 4432 _(b'cannot use httprequest without an HTTP peer')
4420 4433 )
4421 4434
4422 4435 request = action.split(b' ', 2)
4423 4436 if len(request) != 3:
4424 4437 raise error.Abort(
4425 4438 _(
4426 4439 b'invalid httprequest: expected format is '
4427 4440 b'"httprequest <method> <path>'
4428 4441 )
4429 4442 )
4430 4443
4431 4444 method, httppath = request[1:]
4432 4445 headers = {}
4433 4446 body = None
4434 4447 frames = []
4435 4448 for line in lines:
4436 4449 line = line.lstrip()
4437 4450 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4438 4451 if m:
4439 4452 # Headers need to use native strings.
4440 4453 key = pycompat.strurl(m.group(1))
4441 4454 value = pycompat.strurl(m.group(2))
4442 4455 headers[key] = value
4443 4456 continue
4444 4457
4445 4458 if line.startswith(b'BODYFILE '):
4446 4459 with open(line.split(b' ', 1)[1], b'rb') as fh:
4447 4460 body = fh.read()
4448 4461 elif line.startswith(b'frame '):
4449 4462 frame = wireprotoframing.makeframefromhumanstring(
4450 4463 line[len(b'frame ') :]
4451 4464 )
4452 4465
4453 4466 frames.append(frame)
4454 4467 else:
4455 4468 raise error.Abort(
4456 4469 _(b'unknown argument to httprequest: %s') % line
4457 4470 )
4458 4471
4459 4472 url = path + httppath
4460 4473
4461 4474 if frames:
4462 4475 body = b''.join(bytes(f) for f in frames)
4463 4476
4464 4477 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4465 4478
4466 4479 # urllib.Request insists on using has_data() as a proxy for
4467 4480 # determining the request method. Override that to use our
4468 4481 # explicitly requested method.
4469 4482 req.get_method = lambda: pycompat.sysstr(method)
4470 4483
4471 4484 try:
4472 4485 res = opener.open(req)
4473 4486 body = res.read()
4474 4487 except util.urlerr.urlerror as e:
4475 4488 # read() method must be called, but only exists in Python 2
4476 4489 getattr(e, 'read', lambda: None)()
4477 4490 continue
4478 4491
4479 4492 ct = res.headers.get('Content-Type')
4480 4493 if ct == 'application/mercurial-cbor':
4481 4494 ui.write(
4482 4495 _(b'cbor> %s\n')
4483 4496 % stringutil.pprint(
4484 4497 cborutil.decodeall(body), bprefix=True, indent=2
4485 4498 )
4486 4499 )
4487 4500
4488 4501 elif action == b'close':
4489 4502 assert peer is not None
4490 4503 peer.close()
4491 4504 elif action == b'readavailable':
4492 4505 if not stdout or not stderr:
4493 4506 raise error.Abort(
4494 4507 _(b'readavailable not available on this peer')
4495 4508 )
4496 4509
4497 4510 stdin.close()
4498 4511 stdout.read()
4499 4512 stderr.read()
4500 4513
4501 4514 elif action == b'readline':
4502 4515 if not stdout:
4503 4516 raise error.Abort(_(b'readline not available on this peer'))
4504 4517 stdout.readline()
4505 4518 elif action == b'ereadline':
4506 4519 if not stderr:
4507 4520 raise error.Abort(_(b'ereadline not available on this peer'))
4508 4521 stderr.readline()
4509 4522 elif action.startswith(b'read '):
4510 4523 count = int(action.split(b' ', 1)[1])
4511 4524 if not stdout:
4512 4525 raise error.Abort(_(b'read not available on this peer'))
4513 4526 stdout.read(count)
4514 4527 elif action.startswith(b'eread '):
4515 4528 count = int(action.split(b' ', 1)[1])
4516 4529 if not stderr:
4517 4530 raise error.Abort(_(b'eread not available on this peer'))
4518 4531 stderr.read(count)
4519 4532 else:
4520 4533 raise error.Abort(_(b'unknown action: %s') % action)
4521 4534
4522 4535 if batchedcommands is not None:
4523 4536 raise error.Abort(_(b'unclosed "batchbegin" request'))
4524 4537
4525 4538 if peer:
4526 4539 peer.close()
4527 4540
4528 4541 if proc:
4529 4542 proc.kill()
@@ -1,795 +1,796
1 1 test that a commit clears the merge state.
2 2
3 3 $ hg init repo
4 4 $ cd repo
5 5
6 6 $ echo foo > file1
7 7 $ echo foo > file2
8 8 $ hg commit -Am 'add files'
9 9 adding file1
10 10 adding file2
11 11
12 12 $ echo bar >> file1
13 13 $ echo bar >> file2
14 14 $ hg commit -Am 'append bar to files'
15 15
16 16 create a second head with conflicting edits
17 17
18 18 $ hg up -C 0
19 19 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
20 20 $ echo baz >> file1
21 21 $ echo baz >> file2
22 22 $ hg commit -Am 'append baz to files'
23 23 created new head
24 24
25 25 create a third head with no conflicting edits
26 26 $ hg up -qC 0
27 27 $ echo foo > file3
28 28 $ hg commit -Am 'add non-conflicting file'
29 29 adding file3
30 30 created new head
31 31
32 32 failing merge
33 33
34 34 $ hg up -qC 2
35 35 $ hg merge --tool=internal:fail 1
36 36 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
37 37 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
38 38 [1]
39 39
40 40 resolve -l should contain unresolved entries
41 41
42 42 $ hg resolve -l
43 43 U file1
44 44 U file2
45 45
46 46 $ hg resolve -l --no-status
47 47 file1
48 48 file2
49 49
50 50 resolving an unknown path should emit a warning, but not for -l
51 51
52 52 $ hg resolve -m does-not-exist
53 53 arguments do not match paths that need resolving
54 54 $ hg resolve -l does-not-exist
55 55
56 56 tell users how they could have used resolve
57 57
58 58 $ mkdir nested
59 59 $ cd nested
60 60 $ hg resolve -m file1
61 61 arguments do not match paths that need resolving
62 62 (try: hg resolve -m path:file1)
63 63 $ hg resolve -m file1 filez
64 64 arguments do not match paths that need resolving
65 65 (try: hg resolve -m path:file1 path:filez)
66 66 $ hg resolve -m path:file1 path:filez
67 67 $ hg resolve -l
68 68 R file1
69 69 U file2
70 70 $ hg resolve -l --config ui.relative-paths=yes
71 71 R ../file1
72 72 U ../file2
73 73 $ hg resolve --re-merge filez file2
74 74 arguments do not match paths that need resolving
75 75 (try: hg resolve --re-merge path:filez path:file2)
76 76 $ hg resolve -m filez file2
77 77 arguments do not match paths that need resolving
78 78 (try: hg resolve -m path:filez path:file2)
79 79 $ hg resolve -m path:filez path:file2
80 80 (no more unresolved files)
81 81 $ hg resolve -l
82 82 R file1
83 83 R file2
84 84
85 85 cleanup
86 86 $ hg resolve -u
87 87 $ cd ..
88 88 $ rmdir nested
89 89
90 90 don't allow marking or unmarking driver-resolved files
91 91
92 92 $ cat > $TESTTMP/markdriver.py << EOF
93 93 > '''mark and unmark files as driver-resolved'''
94 94 > from mercurial import (
95 95 > mergestate,
96 96 > pycompat,
97 97 > registrar,
98 98 > scmutil,
99 99 > )
100 100 > cmdtable = {}
101 101 > command = registrar.command(cmdtable)
102 102 > @command(b'markdriver',
103 103 > [(b'u', b'unmark', None, b'')],
104 104 > b'FILE...')
105 105 > def markdriver(ui, repo, *pats, **opts):
106 106 > wlock = repo.wlock()
107 107 > opts = pycompat.byteskwargs(opts)
108 108 > try:
109 109 > ms = mergestate.mergestate.read(repo)
110 110 > m = scmutil.match(repo[None], pats, opts)
111 111 > for f in ms:
112 112 > if not m(f):
113 113 > continue
114 114 > if not opts[b'unmark']:
115 115 > ms.mark(f, b'd')
116 116 > else:
117 117 > ms.mark(f, b'u')
118 118 > ms.commit()
119 119 > finally:
120 120 > wlock.release()
121 121 > EOF
122 122 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver file1
123 123 $ hg resolve --list
124 124 D file1
125 125 U file2
126 126 $ hg resolve --mark file1
127 127 not marking file1 as it is driver-resolved
128 128 this should not print out file1
129 129 $ hg resolve --mark --all
130 130 (no more unresolved files -- run "hg resolve --all" to conclude)
131 131 $ hg resolve --mark 'glob:file*'
132 132 (no more unresolved files -- run "hg resolve --all" to conclude)
133 133 $ hg resolve --list
134 134 D file1
135 135 R file2
136 136 $ hg resolve --unmark file1
137 137 not unmarking file1 as it is driver-resolved
138 138 (no more unresolved files -- run "hg resolve --all" to conclude)
139 139 $ hg resolve --unmark --all
140 140 $ hg resolve --list
141 141 D file1
142 142 U file2
143 143 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver --unmark file1
144 144 $ hg resolve --list
145 145 U file1
146 146 U file2
147 147
148 148 resolve the failure
149 149
150 150 $ echo resolved > file1
151 151 $ hg resolve -m file1
152 152
153 153 resolve -l should show resolved file as resolved
154 154
155 155 $ hg resolve -l
156 156 R file1
157 157 U file2
158 158
159 159 $ hg resolve -l -Tjson
160 160 [
161 161 {
162 162 "mergestatus": "R",
163 163 "path": "file1"
164 164 },
165 165 {
166 166 "mergestatus": "U",
167 167 "path": "file2"
168 168 }
169 169 ]
170 170
171 171 $ hg resolve -l -T '{path} {mergestatus} {status} {p1rev} {p2rev}\n'
172 172 file1 R M 2 1
173 173 file2 U M 2 1
174 174
175 175 resolve -m without paths should mark all resolved
176 176
177 177 $ hg resolve -m
178 178 (no more unresolved files)
179 179 $ hg commit -m 'resolved'
180 180
181 181 resolve -l should be empty after commit
182 182
183 183 $ hg resolve -l
184 184
185 185 $ hg resolve -l -Tjson
186 186 [
187 187 ]
188 188
189 189 resolve --all should abort when no merge in progress
190 190
191 191 $ hg resolve --all
192 192 abort: resolve command not applicable when not merging
193 193 [255]
194 194
195 195 resolve -m should abort when no merge in progress
196 196
197 197 $ hg resolve -m
198 198 abort: resolve command not applicable when not merging
199 199 [255]
200 200
201 201 cannot update or merge when there are unresolved conflicts
202 202
203 203 $ hg up -qC 0
204 204 $ echo quux >> file1
205 205 $ hg up 1
206 206 merging file1
207 207 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
208 208 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
209 209 use 'hg resolve' to retry unresolved file merges
210 210 [1]
211 211 $ hg up 0
212 212 abort: outstanding merge conflicts
213 213 (use 'hg resolve' to resolve)
214 214 [255]
215 215 $ hg merge 2
216 216 abort: outstanding merge conflicts
217 217 (use 'hg resolve' to resolve)
218 218 [255]
219 219 $ hg merge --force 2
220 220 abort: outstanding merge conflicts
221 221 (use 'hg resolve' to resolve)
222 222 [255]
223 223
224 224 set up conflict-free merge
225 225
226 226 $ hg up -qC 3
227 227 $ hg merge 1
228 228 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
229 229 (branch merge, don't forget to commit)
230 230
231 231 resolve --all should do nothing in merge without conflicts
232 232 $ hg resolve --all
233 233 (no more unresolved files)
234 234
235 235 resolve -m should do nothing in merge without conflicts
236 236
237 237 $ hg resolve -m
238 238 (no more unresolved files)
239 239
240 240 get back to conflicting state
241 241
242 242 $ hg up -qC 2
243 243 $ hg merge --tool=internal:fail 1
244 244 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
245 245 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
246 246 [1]
247 247
248 248 resolve without arguments should suggest --all
249 249 $ hg resolve
250 250 abort: no files or directories specified
251 251 (use --all to re-merge all unresolved files)
252 252 [255]
253 253
254 254 resolve --all should re-merge all unresolved files
255 255 $ hg resolve --all
256 256 merging file1
257 257 merging file2
258 258 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
259 259 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
260 260 [1]
261 261 $ cat file1.orig
262 262 foo
263 263 baz
264 264 $ cat file2.orig
265 265 foo
266 266 baz
267 267
268 268 .orig files should exist where specified
269 269 $ hg resolve --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
270 270 merging file1
271 271 creating directory: $TESTTMP/repo/.hg/origbackups
272 272 merging file2
273 273 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
274 274 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
275 275 [1]
276 276 $ ls .hg/origbackups
277 277 file1
278 278 file2
279 279 $ grep '<<<' file1 > /dev/null
280 280 $ grep '<<<' file2 > /dev/null
281 281
282 282 resolve <file> should re-merge file
283 283 $ echo resolved > file1
284 284 $ hg resolve -q file1
285 285 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
286 286 [1]
287 287 $ grep '<<<' file1 > /dev/null
288 288
289 289 test .orig behavior with resolve
290 290
291 291 $ hg resolve -q file1 --tool "sh -c 'f --dump \"$TESTTMP/repo/file1.orig\"'"
292 292 $TESTTMP/repo/file1.orig:
293 293 >>>
294 294 foo
295 295 baz
296 296 <<<
297 297
298 298 resolve <file> should do nothing if 'file' was marked resolved
299 299 $ echo resolved > file1
300 300 $ hg resolve -m file1
301 301 $ hg resolve -q file1
302 302 $ cat file1
303 303 resolved
304 304
305 305 insert unsupported advisory merge record
306 306
307 307 $ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x
308 308 $ hg debugmergestate
309 309 local (working copy): 57653b9f834a4493f7240b0681efcb9ae7cab745
310 310 other (merge rev): dc77451844e37f03f5c559e3b8529b2b48d381d1
311 311 file: file1 (state "r")
312 312 local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
313 313 ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
314 314 other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
315 315 extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac
316 316 file: file2 (state "u")
317 317 local path: file2 (hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523, flags "")
318 318 ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
319 319 other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
320 320 extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac
321 321 $ hg resolve -l
322 322 R file1
323 323 U file2
324 324
325 325 test json output
326 326
327 327 $ hg debugmergestate -T json
328 328 [
329 329 {
330 330 "commits": [{"label": "working copy", "name": "local", "node": "57653b9f834a4493f7240b0681efcb9ae7cab745"}, {"label": "merge rev", "name": "other", "node": "dc77451844e37f03f5c559e3b8529b2b48d381d1"}],
331 "extras": [],
331 332 "files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}]
332 333 }
333 334 ]
334 335
335 336
336 337 insert unsupported mandatory merge record
337 338
338 339 $ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X
339 340 $ hg debugmergestate
340 341 abort: unsupported merge state records: X
341 342 (see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
342 343 [255]
343 344 $ hg resolve -l
344 345 abort: unsupported merge state records: X
345 346 (see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
346 347 [255]
347 348 $ hg resolve -ma
348 349 abort: unsupported merge state records: X
349 350 (see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
350 351 [255]
351 352 $ hg summary
352 353 warning: merge state has unsupported record types: X
353 354 parent: 2:57653b9f834a
354 355 append baz to files
355 356 parent: 1:dc77451844e3
356 357 append bar to files
357 358 branch: default
358 359 commit: 2 modified, 2 unknown (merge)
359 360 update: 2 new changesets (update)
360 361 phases: 5 draft
361 362
362 363 update --clean shouldn't abort on unsupported records
363 364
364 365 $ hg up -qC 1
365 366 $ hg debugmergestate
366 367 no merge state found
367 368
368 369 test crashed merge with empty mergestate
369 370
370 371 $ mkdir .hg/merge
371 372 $ touch .hg/merge/state
372 373
373 374 resolve -l should be empty
374 375
375 376 $ hg resolve -l
376 377
377 378 resolve -m can be configured to look for remaining conflict markers
378 379 $ hg up -qC 2
379 380 $ hg merge -q --tool=internal:merge 1
380 381 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
381 382 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
382 383 [1]
383 384 $ hg resolve -l
384 385 U file1
385 386 U file2
386 387 $ echo 'remove markers' > file1
387 388 $ hg --config commands.resolve.mark-check=abort resolve -m
388 389 warning: the following files still have conflict markers:
389 390 file2
390 391 abort: conflict markers detected
391 392 (use --all to mark anyway)
392 393 [255]
393 394 $ hg resolve -l
394 395 U file1
395 396 U file2
396 397 Try with --all from the hint
397 398 $ hg --config commands.resolve.mark-check=abort resolve -m --all
398 399 warning: the following files still have conflict markers:
399 400 file2
400 401 (no more unresolved files)
401 402 $ hg resolve -l
402 403 R file1
403 404 R file2
404 405 Test option value 'warn'
405 406 $ hg resolve --unmark
406 407 $ hg resolve -l
407 408 U file1
408 409 U file2
409 410 $ hg --config commands.resolve.mark-check=warn resolve -m
410 411 warning: the following files still have conflict markers:
411 412 file2
412 413 (no more unresolved files)
413 414 $ hg resolve -l
414 415 R file1
415 416 R file2
416 417 If the file is already marked as resolved, we don't warn about it
417 418 $ hg resolve --unmark file1
418 419 $ hg resolve -l
419 420 U file1
420 421 R file2
421 422 $ hg --config commands.resolve.mark-check=warn resolve -m
422 423 (no more unresolved files)
423 424 $ hg resolve -l
424 425 R file1
425 426 R file2
426 427 If the user passes an invalid value, we treat it as 'none'.
427 428 $ hg resolve --unmark
428 429 $ hg resolve -l
429 430 U file1
430 431 U file2
431 432 $ hg --config commands.resolve.mark-check=nope resolve -m
432 433 (no more unresolved files)
433 434 $ hg resolve -l
434 435 R file1
435 436 R file2
436 437 Test explicitly setting the option to 'none'
437 438 $ hg resolve --unmark
438 439 $ hg resolve -l
439 440 U file1
440 441 U file2
441 442 $ hg --config commands.resolve.mark-check=none resolve -m
442 443 (no more unresolved files)
443 444 $ hg resolve -l
444 445 R file1
445 446 R file2
446 447 Test with marking an explicit file as resolved; this should not abort (since
447 448 there's no --force flag, we have no way of combining --all with a filename)
448 449 $ hg resolve --unmark
449 450 $ hg resolve -l
450 451 U file1
451 452 U file2
452 453 (This downgrades to a warning since an explicit file was specified).
453 454 $ hg --config commands.resolve.mark-check=abort resolve -m file2
454 455 warning: the following files still have conflict markers:
455 456 file2
456 457 $ hg resolve -l
457 458 U file1
458 459 R file2
459 460 Testing the --re-merge flag
460 461 $ hg resolve --unmark file1
461 462 $ hg resolve -l
462 463 U file1
463 464 R file2
464 465 $ hg resolve --mark --re-merge
465 466 abort: too many actions specified
466 467 [255]
467 468 $ hg resolve --re-merge --all
468 469 merging file1
469 470 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
470 471 [1]
471 472 Explicit re-merge
472 473 $ hg resolve --unmark file1
473 474 $ hg resolve --config commands.resolve.explicit-re-merge=1 --all
474 475 abort: no action specified
475 476 (use --mark, --unmark, --list or --re-merge)
476 477 [255]
477 478 $ hg resolve --config commands.resolve.explicit-re-merge=1 --re-merge --all
478 479 merging file1
479 480 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
480 481 [1]
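With commands.resolve.explicit-re-merge enabled, re-merging is no longer the implicit
default action: resolve aborts unless one of --mark, --unmark, --list or --re-merge is
given explicitly, as the two runs above show.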
481 482
482 483 $ cd ..
483 484
484 485 ======================================================
485 486 Test 'hg resolve' confirm config option functionality
486 487 ======================================================
487 488 $ cat >> $HGRCPATH << EOF
488 489 > [extensions]
489 490 > rebase=
490 491 > EOF
491 492
492 493 $ hg init repo2
493 494 $ cd repo2
494 495
495 496 $ echo boss > boss
496 497 $ hg ci -Am "add boss"
497 498 adding boss
498 499
499 500 $ for emp in emp1 emp2 emp3; do echo work > $emp; done;
500 501 $ hg ci -Aqm "added emp1 emp2 emp3"
501 502
502 503 $ hg up 0
503 504 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
504 505
505 506 $ for emp in emp1 emp2 emp3; do echo nowork > $emp; done;
506 507 $ hg ci -Aqm "added lazy emp1 emp2 emp3"
507 508
508 509 $ hg log -GT "{rev} {node|short} {firstline(desc)}\n"
509 510 @ 2 0acfd4a49af0 added lazy emp1 emp2 emp3
510 511 |
511 512 | o 1 f30f98a8181f added emp1 emp2 emp3
512 513 |/
513 514 o 0 88660038d466 add boss
514 515
515 516 $ hg rebase -s 1 -d 2
516 517 rebasing 1:f30f98a8181f "added emp1 emp2 emp3"
517 518 merging emp1
518 519 merging emp2
519 520 merging emp3
520 521 warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
521 522 warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
522 523 warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
523 524 unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
524 525 [1]
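The rebase is now interrupted with three unresolved files; the commands.resolve.confirm
tests below operate on this state and hand control back to 'hg rebase --continue' once
everything is marked resolved.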
525 526
526 527 Test when the commands.resolve.confirm config option is not set:
527 528 ===========================================================
528 529 $ hg resolve --all
529 530 merging emp1
530 531 merging emp2
531 532 merging emp3
532 533 warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
533 534 warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
534 535 warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
535 536 [1]
536 537
537 538 Test when the config option is set:
538 539 ==============================
539 540 $ cat >> .hg/hgrc << EOF
540 541 > [ui]
541 542 > interactive = True
542 543 > [commands]
543 544 > resolve.confirm = True
544 545 > EOF
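With ui.interactive and commands.resolve.confirm enabled, resolve asks for confirmation
before re-merging, marking, or unmarking all files at once; the prompts below are
answered through heredocs on stdin.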
545 546
546 547 $ hg resolve
547 548 abort: no files or directories specified
548 549 (use --all to re-merge all unresolved files)
549 550 [255]
550 551 $ hg resolve --all << EOF
551 552 > n
552 553 > EOF
553 554 re-merge all unresolved files (yn)? n
554 555 abort: user quit
555 556 [255]
556 557
557 558 $ hg resolve --all << EOF
558 559 > y
559 560 > EOF
560 561 re-merge all unresolved files (yn)? y
561 562 merging emp1
562 563 merging emp2
563 564 merging emp3
564 565 warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
565 566 warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
566 567 warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
567 568 [1]
568 569
569 570 Test that commands.resolve.confirm respects the --mark option (only when no pattern arguments are given):
570 571 ===============================================================================================
571 572
572 573 $ hg resolve -m emp1
573 574 $ hg resolve -l
574 575 R emp1
575 576 U emp2
576 577 U emp3
577 578
578 579 $ hg resolve -m << EOF
579 580 > n
580 581 > EOF
581 582 mark all unresolved files as resolved (yn)? n
582 583 abort: user quit
583 584 [255]
584 585
585 586 $ hg resolve -m << EOF
586 587 > y
587 588 > EOF
588 589 mark all unresolved files as resolved (yn)? y
589 590 (no more unresolved files)
590 591 continue: hg rebase --continue
591 592 $ hg resolve -l
592 593 R emp1
593 594 R emp2
594 595 R emp3
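As the heading says, the confirmation prompt only appears when no pattern arguments are
given; 'hg resolve -m emp1' above ran without asking.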
595 596
596 597 Test that commands.resolve.confirm respects the --unmark option (only when no pattern arguments are given):
597 598 =================================================================================================
598 599
599 600 $ hg resolve -u emp1
600 601
601 602 $ hg resolve -l
602 603 U emp1
603 604 R emp2
604 605 R emp3
605 606
606 607 $ hg resolve -u << EOF
607 608 > n
608 609 > EOF
609 610 mark all resolved files as unresolved (yn)? n
610 611 abort: user quit
611 612 [255]
612 613
613 614 $ hg resolve -m << EOF
614 615 > y
615 616 > EOF
616 617 mark all unresolved files as resolved (yn)? y
617 618 (no more unresolved files)
618 619 continue: hg rebase --continue
619 620
620 621 $ hg resolve -l
621 622 R emp1
622 623 R emp2
623 624 R emp3
624 625
625 626 $ hg rebase --abort
626 627 rebase aborted
627 628
628 629 Done with commands.resolve.confirm tests:
629 630 $ cd ..
630 631
631 632 Test that commands.resolve.mark-check works even if there are deleted files:
632 633 $ hg init resolve-deleted
633 634 $ cd resolve-deleted
634 635 $ echo r0 > file1
635 636 $ hg ci -qAm r0
636 637 $ echo r1 > file1
637 638 $ hg ci -qm r1
638 639 $ hg co -qr 0
639 640 $ hg rm file1
640 641 $ hg ci -qm "r2 (delete file1)"
641 642
642 643 (At this point we have r0 creating file1, and sibling commits r1 and r2, which
643 644 modify and delete file1, respectively)
644 645
645 646 $ hg merge -r 1
646 647 file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
647 648 You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
648 649 What do you want to do? u
649 650 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
650 651 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
651 652 [1]
652 653 $ hg resolve --list
653 654 U file1
654 655 Because we left it as 'unresolved' the file should still exist.
655 656 $ [ -f file1 ] || echo "File does not exist?"
656 657 BC (backwards-compatibility) behavior: `hg resolve --mark` accepts that the
657 658 file is still there and doesn't have a problem with this situation.
658 659 $ hg resolve --mark --config commands.resolve.mark-check=abort
659 660 (no more unresolved files)
660 661 $ hg resolve --list
661 662 R file1
662 663 The file is still there:
663 664 $ [ -f file1 ] || echo "File does not exist?"
664 665 Let's check mark-check=warn:
665 666 $ hg resolve --unmark file1
666 667 $ hg resolve --mark --config commands.resolve.mark-check=warn
667 668 (no more unresolved files)
668 669 $ hg resolve --list
669 670 R file1
670 671 The file is still there:
671 672 $ [ -f file1 ] || echo "File does not exist?"
672 673 Let's resolve the issue by deleting the file via `hg resolve`
673 674 $ hg resolve --unmark file1
674 675 $ echo 'd' | hg resolve file1 --config ui.interactive=1
675 676 file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
676 677 You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
677 678 What do you want to do? d
678 679 (no more unresolved files)
679 680 $ hg resolve --list
680 681 R file1
681 682 The file is deleted:
682 683 $ [ -f file1 ] && echo "File still exists?" || true
683 684 Doing `hg resolve --mark` doesn't break now that the file is missing:
684 685 $ hg resolve --mark --config commands.resolve.mark-check=abort
685 686 (no more unresolved files)
686 687 $ hg resolve --mark --config commands.resolve.mark-check=warn
687 688 (no more unresolved files)
688 689 Resurrect the file, and delete it outside of hg:
689 690 $ hg resolve --unmark file1
690 691 $ hg resolve file1
691 692 file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
692 693 You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
693 694 What do you want to do? u
694 695 [1]
695 696 $ [ -f file1 ] || echo "File does not exist?"
696 697 $ hg resolve --list
697 698 U file1
698 699 $ rm file1
699 700 $ hg resolve --mark --config commands.resolve.mark-check=abort
700 701 (no more unresolved files)
701 702 $ hg resolve --list
702 703 R file1
703 704 $ hg resolve --unmark file1
704 705 $ hg resolve file1
705 706 file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
706 707 You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
707 708 What do you want to do? u
708 709 [1]
709 710 $ [ -f file1 ] || echo "File does not exist?"
710 711 $ hg resolve --list
711 712 U file1
712 713 $ rm file1
713 714 $ hg resolve --mark --config commands.resolve.mark-check=warn
714 715 (no more unresolved files)
715 716 $ hg resolve --list
716 717 R file1
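In all of the runs above, mark-check coped with file1 being absent from the working
copy: whether set to 'abort' or 'warn', the missing file is simply marked resolved
without an abort or a warning.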
717 718
718 719
719 720 For completeness, let's try that in the opposite direction (merging r2 into r1,
720 721 instead of r1 into r2):
721 722 $ hg update -qCr 1
722 723 $ hg merge -r 2
723 724 file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
724 725 You can use (c)hanged version, (d)elete, or leave (u)nresolved.
725 726 What do you want to do? u
726 727 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
727 728 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
728 729 [1]
729 730 $ hg resolve --list
730 731 U file1
731 732 Because we left it as 'unresolved' the file should still exist.
732 733 $ [ -f file1 ] || echo "File does not exist?"
733 734 BC (backwards-compatibility) behavior: `hg resolve --mark` accepts that the
734 735 file is still there and doesn't have a problem with this situation.
735 736 $ hg resolve --mark --config commands.resolve.mark-check=abort
736 737 (no more unresolved files)
737 738 $ hg resolve --list
738 739 R file1
739 740 The file is still there:
740 741 $ [ -f file1 ] || echo "File does not exist?"
741 742 Let's check mark-check=warn:
742 743 $ hg resolve --unmark file1
743 744 $ hg resolve --mark --config commands.resolve.mark-check=warn
744 745 (no more unresolved files)
745 746 $ hg resolve --list
746 747 R file1
747 748 The file is still there:
748 749 $ [ -f file1 ] || echo "File does not exist?"
749 750 Let's resolve the issue by deleting the file via `hg resolve`
750 751 $ hg resolve --unmark file1
751 752 $ echo 'd' | hg resolve file1 --config ui.interactive=1
752 753 file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
753 754 You can use (c)hanged version, (d)elete, or leave (u)nresolved.
754 755 What do you want to do? d
755 756 (no more unresolved files)
756 757 $ hg resolve --list
757 758 R file1
758 759 The file is deleted:
759 760 $ [ -f file1 ] && echo "File still exists?" || true
760 761 Doing `hg resolve --mark` doesn't break now that the file is missing:
761 762 $ hg resolve --mark --config commands.resolve.mark-check=abort
762 763 (no more unresolved files)
763 764 $ hg resolve --mark --config commands.resolve.mark-check=warn
764 765 (no more unresolved files)
765 766 Resurrect the file, and delete it outside of hg:
766 767 $ hg resolve --unmark file1
767 768 $ hg resolve file1
768 769 file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
769 770 You can use (c)hanged version, (d)elete, or leave (u)nresolved.
770 771 What do you want to do? u
771 772 [1]
772 773 $ [ -f file1 ] || echo "File does not exist?"
773 774 $ hg resolve --list
774 775 U file1
775 776 $ rm file1
776 777 $ hg resolve --mark --config commands.resolve.mark-check=abort
777 778 (no more unresolved files)
778 779 $ hg resolve --list
779 780 R file1
780 781 $ hg resolve --unmark file1
781 782 $ hg resolve file1
782 783 file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
783 784 You can use (c)hanged version, (d)elete, or leave (u)nresolved.
784 785 What do you want to do? u
785 786 [1]
786 787 $ [ -f file1 ] || echo "File does not exist?"
787 788 $ hg resolve --list
788 789 U file1
789 790 $ rm file1
790 791 $ hg resolve --mark --config commands.resolve.mark-check=warn
791 792 (no more unresolved files)
792 793 $ hg resolve --list
793 794 R file1
794 795
795 796 $ cd ..
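
For reference, the configuration knobs exercised above could also be set persistently
in a user's hgrc instead of being passed with --config on each command. A hypothetical
sketch, not executed by this test (option names and values are taken from the runs
above):

[ui]
interactive = True
[commands]
resolve.confirm = True
resolve.explicit-re-merge = 1
resolve.mark-check = warn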