##// END OF EJS Templates
obsolete: closest divergent support...
Boris Feld -
r33274:68f3e819 default
parent child Browse files
Show More
@@ -1,2245 +1,2248
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 obsutil,
51 51 phases,
52 52 policy,
53 53 pvec,
54 54 pycompat,
55 55 registrar,
56 56 repair,
57 57 revlog,
58 58 revset,
59 59 revsetlang,
60 60 scmutil,
61 61 setdiscovery,
62 62 simplemerge,
63 63 smartset,
64 64 sslutil,
65 65 streamclone,
66 66 templater,
67 67 treediscovery,
68 68 upgrade,
69 69 util,
70 70 vfs as vfsmod,
71 71 )
72 72
73 73 release = lockmod.release
74 74
75 75 command = registrar.command()
76 76
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_('either two or three arguments required'))
    if nargs == 3:
        # explicit index file given: resolve the revisions against it
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    ancestornode = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(ancestornode), hex(ancestornode)))
95 95
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # read the bundle from the given path and replay it onto the local repo
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
102 102
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense on a fresh repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # first pass over the DAG text: determine number of revs in DAG
    # (only 'n' events create revisions) so the progress total is right
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev; each rev later appends its
        # marker to its own slice of lines so merges conflict predictably
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    # second pass: actually create the commits, all inside one transaction
    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1  # id of the last node committed; -1 means none yet
        atbranch = 'default'
        nodeids = []  # node hash for each id created so far (backref targets)
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the file contents of both
                        # parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        # linear: start from the parent's version of the file
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's slice of lines with its id
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # single file fully rewritten by every revision
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # one brand-new file per revision; merges also carry
                    # forward all "nf*" files from the second parent
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                # translate backref parent ids to node hashes for memctx
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag for the preceding node; written out at the end
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # switch the named branch for subsequent commits
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
254 254
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup unbundler 'gen' to the ui

    With 'all' set, every delta chunk of every section (changelog,
    manifest, each filelog) is printed in full; otherwise only the
    changelog node ids are listed. 'indent' prefixes each output line
    (used when nested inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # drain and print one section's delta chunks; deltachunk()
            # returns {} at the end of the section, which is the iter()
            # sentinel below
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        # sections must be consumed in stream order:
        # changelog, manifest, then one section per filelog
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # terse mode: list only the changelog node ids
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node
292 292
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display the version and obsolescence markers carried by a bundle2 part"""
    opts = pycompat.byteskwargs(opts)
    payload = part.read()
    prefix = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(payload)
    except error.UnknownVersion as exc:
        # the data uses a marker format we do not know how to decode
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (prefix, exc.version, len(payload)))
    else:
        ui.write("%sversion: %s (%d bytes)\n"
                 % (prefix, version, len(payload)))
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
315 315
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in a 'phase-heads' bundle2 part"""
    prefix = ' ' * indent
    headsbyphase = bundle2._readphaseheads(data)
    # emit heads grouped by phase, one "<node> <phasename>" line each
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
324 324
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    wantedtypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when any type was requested
        if wantedtypes and part.type not in wantedtypes:
            continue
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
343 343
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # only report the bundlespec; do not dump the contents
            ui.write('%s\n' % exchange.getbundlespec(ui, fh))
            return
        gen = exchange.readbundle(ui, fh, bundlepath)
        # dispatch on the container format detected by readbundle
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
362 362
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    dirstate parents and warns about each inconsistency found; aborts
    if any inconsistency was detected.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n' (normal) and 'r' (removed) entries must exist in manifest1;
        # 'a' (added) entries must not; 'm' (merged) must be in one parent
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: the message local must not be named 'error' -- that would
        # shadow the imported 'error' module and make error.Abort below
        # fail with AttributeError instead of aborting cleanly
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
390 390
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # --style lists configured styles; default lists raw colors
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
401 401
def _debugdisplaycolor(ui):
    """list every color label the current color mode can render"""
    ui = ui.copy()
    # start from a clean slate so only real colors get listed
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose configured color.* / terminfo.* entries
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[len('color.'):]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[len('terminfo.'):]
    ui.write(_('available colors:\n'))
    # names containing '_' sort last so '_background' entries group together
    def sortkey(item):
        return ('_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
419 419
def _debugdisplaystyle(ui):
    """list every configured style and the effects attached to it"""
    ui.write(_('available style:\n'))
    # pad the effect column so it lines up under the longest label
    longest = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, longest - len(label)))
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write('\n')
431 431
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    # report what a consumer must support to apply this bundle
    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
449 449
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit index file: emit its DAG, labeling any listed revs as rN
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield 'n' (node) events, plus 'l' (label) for requested revs
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: emit the repo's changelog DAG
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map each tagged rev to its tag names for 'l' events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
512 512
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    usesinternal = (opts.get('changelog') or opts.get('manifest')
                    or opts.get('dir'))
    if usesinternal:
        # with -c/-m/--dir the single positional argument is the revision
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
528 528
@command('debugdate',
        [('e', 'extended', None, _('try extended date formats'))],
        _('[-e] DATE [RANGE]'),
        norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e additionally tries the extended set of date formats
    parsed = (util.parsedate(date, util.extendeddateformats)
              if opts[r"extended"] else util.parsedate(date))
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
544 544
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # raw index-entry fields used below: e[1] compressed size,
        # e[2] uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base may be any revision; classify it
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta a rev is either a full snapshot ('base',
            # base == rev) or a delta against the previous revision
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # sum the compressed sizes of every revision in the delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    # chains are numbered in the order their base revision is first seen
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        # bytes in that span that belong to other chains (seek overhead)
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this rev is its own base
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
646 646
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    if datesort:
        # order by saved mtime first, breaking ties on the filename
        keyfunc = lambda x: (x[1][3], x[0])
    else:
        keyfunc = None # plain filename order
    entries = sorted(repo.dirstate._map.iteritems(), key=keyfunc)
    for file_, ent in entries:
        # ent fields (as used here): [0] state char, [1] mode bits,
        # [2] size, [3] mtime
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
677 677
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # run one discovery round between the given head sets and report
        # the common heads found
        if opts.get('old'):
            # legacy tree-walking discovery protocol
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # prune the common set down to its heads unless asked not to
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            # current set-based sampling discovery
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in server log files; each line
        # is ';'-separated with the operation name in field 1
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        # single round using heads from the command line / remote
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
744 744
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # machine/verbose modes: name on its own line, no annotation
            fm.write('name', '%s\n', extname)
        else:
            # default mode: annotate the name with its tested-with status
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
790 790
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        # show the parsed tree before evaluating it
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
803 803
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(value):
        return 'yes' if value else 'no'
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probing case sensitivity requires a scratch file in the target dir
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        # directory may be unwritable; report '(unknown)' instead of failing
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
818 818
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    kwargs = {}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **kwargs)

    # map the user-facing compression name to the internal bundle type
    typemap = {'none': 'HG10UN',
               'bzip2': 'HG10BZ',
               'gzip': 'HG10GZ',
               'bundle2': 'HG20'}
    bundletype = typemap.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
853 853
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
        return
    for f in files:
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # not matched directly: maybe an enclosing directory is
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % f)
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % f)
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (f, ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
894 894
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # with generaldelta the stored base column is the delta parent rather
    # than the start of a delta chain, so label the column accordingly
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # full-length hashes with --debug, short form otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            # manifests and filelogs may reference nodes that parents()
            # cannot resolve; fall back to null parents in that case
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
951 951
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        p1, p2 = rlog.parents(rlog.node(rev))
        # the first parent always produces an edge; the second one only
        # when it is a real (non-null) parent, i.e. for merges
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
966 966
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # helper: dump contents into a fresh temp file, return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # count of detected problems; also the command's exit code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions are expected; verify they actually import
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    # 'p' is cleared above when the default template is missing or broken
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1134 1134
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # ask the peer about all ids at once; it answers with booleans
    flags = peer.known([bin(hexnode) for hexnode in ids])
    ui.write("%s\n" % ("".join(["1" if flag else "0" for flag in flags])))
1148 1148
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # plain alias: the actual completion logic lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1153 1153
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    # bug fix: the second operand used to re-test 'force_lock', so running
    # with --force-wlock alone fell through into the reporting code below
    # instead of returning right after freeing the lock
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Print the state of one lock file and return 1 if held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished: treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1225 1225
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump either the v1 or v2 record list, decoding known record types
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # file records: NUL-separated fields, v2 adds onode/flags
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                # extras are stored as alternating key/value entries
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types first, in 'LOml' order; unknown ones after,
        # sorted by their payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
    if ui.verbose:
        printrecords(2)
1324 1324
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # 'branches' is skipped here because historically only *open* branches
    # were listed; those are collected from the branchmap below instead
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1344 1344
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # mode 1: --delete removes markers from the obsstore by index
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    # mode 2: a precursor argument creates a new marker
    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # mode 3: no precursor, so display markers (optionally filtered)
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1460 1460
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completing 'path' against dirstate entries
        # whose status character is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # paths outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate stores '/'-separated paths; normalize on Windows
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # dirstate status characters to accept: n/m normal, a added, r removed
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1525 1525
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # suppress _picktool's diagnostic output unless --debug;
                # buffer errors too so warnings don't leak to the user
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1604 1604
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair of the namespace
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(key),
                                   util.escapestr(value)))
    else:
        # update mode: conditional set, exit status reflects success
        key, old, new = keyinfo
        result = peer.pushkey(namespace, key, old, new)
        ui.status(str(result) + '\n')
        return not result
1625 1625
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # compare the parent vectors (pvecs) of two revisions and report their
    # depth, hamming distance, and ordering relation
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # bug fix: 'rel' used to be left unbound when none of the above
        # comparisons matched, raising NameError at the final write below
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1646 1646
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # keep 'added' entries: they are deliberate dirstate-only state
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1684 1684
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # all of the heavy lifting is delegated to the repair module
    repair.rebuildfncache(ui, repo)
1689 1689
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        relpath = matcher.rel(path)
        # renamed() yields (source path, source filenode) for copies/renames
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
1707 1707
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: print one raw index line per revision and return.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            # delta parent of -1 means a full snapshot: treat the rev itself
            # as its own delta base
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the running set of topological heads: parents stop
            # being heads once a child appears
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # cumulative raw size over cumulative stored size so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Statistics mode: decode the revlog version field into format number
    # and feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # counters for the various delta-base categories
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # [min, max, total] accumulators (min starts as None, fixed up below)
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold one sample into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot: starts a new delta chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # delta revision: extend the chain of its delta parent
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            # span = distance in the file from the chain base to the end
            # of this revision's data
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn totals into averages in-place (totals kept in separate variables)
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format padded to the width of the largest value
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # "value (percent%)" format, optionally extra-padded
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # return (value, percentage-of-total) suitable for pcfmtstr
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # render a chunk-type label; printable ascii gets shown verbatim
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                               numprev))
            ui.write(('    where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                               numprev))
            ui.write(('    other : ') + fmt2 % pcfmt(numoprev,
                                                     numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
1938 1938
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    # ordered pipeline of tree transformations applied to the parse tree
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stages to always print, and which only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping each intermediate tree for later comparison
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-style diff of the two revision sequences
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%s\n" % c)
2039 2039
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # Resolve both revisions up-front; a missing second parent defaults
    # to the null revision.
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()
    with repo.wlock():
        repo.setparents(p1, p2)
2057 2057
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # print path, source and pinned revision for every subrepo entry of
    # the requested changeset, sorted by path
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source   %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2068 2068
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors sets are requested (--closest).

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # with --debug, display full hex hashes instead of short forms
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts['closest'],
                                               cache=cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2122 2125
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # log-template mode requires an actual repository
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # collect -D KEY=VALUE template keyword definitions; 'ui' is reserved
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # show the parse tree, and the expanded tree if aliases changed it
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # generic template: render once with the user-defined properties
        t = formatter.maketemplater(ui, tmpl)
        props['ui'] = ui
        ui.write(t.render(props))
    else:
        # log template: render once per requested changeset
        displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2171 2174
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # hold both the working-directory lock and the store lock while
    # regenerating caches
    with repo.wlock(), repo.lock():
        repo.updatecaches()
2178 2181
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # all of the actual upgrade logic lives in the upgrade module
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2203 2206
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % matcher))
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # honour ui.slash by normalizing path separators for display
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # pad columns to the widest absolute and relative path
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
        max(len(p) for p in paths),
        max(len(matcher.rel(p)) for p in paths))
    for p in paths:
        line = fmt % (p, display(matcher.rel(p)),
                      matcher.exact(p) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2223 2226
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # strip the remote-peer options; only the test options are forwarded
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # keep only options that were actually set
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
@@ -1,518 +1,537
1 1 # obsutil.py - utility functions for obsolescence
2 2 #
3 3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from . import (
11 11 phases,
12 12 )
13 13
class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def __hash__(self):
        return hash(self._data)

    def __eq__(self, other):
        # only markers of the same class with identical raw data are equal
        return type(self) == type(other) and self._data == other._data

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changesets node identifiers"""
        return self._data[1]

    def parentnodes(self):
        """Parents of the precursors (None if not recorded)"""
        return self._data[5]

    def metadata(self):
        """Decoded metadata dictionary"""
        return dict(self._data[3])

    def date(self):
        """Creation date as (unixtime, offset)"""
        return self._data[4]

    def flags(self):
        """The flags field of the marker"""
        return self._data[2]
54 54
def getmarkers(repo, nodes=None, exclusive=False):
    """yield ``marker`` objects for markers known in a repository

    If <nodes> is specified, only markers "relevant" to those nodes are
    returned. With ``exclusive`` set, the markers are further restricted to
    the ones exclusive to <nodes>."""
    if nodes is None:
        # no filtering requested: expose the whole obsstore
        raw = repo.obsstore
    elif exclusive:
        raw = exclusivemarkers(repo, nodes)
    else:
        raw = repo.obsstore.relevantmarkers(nodes)

    for markerdata in raw:
        yield marker(repo, markerdata)
69 69
def closestpredecessors(repo, nodeid):
    """yield the list of next predecessors pointing on visible changectx nodes

    This function respect the repoview filtering, filtered revision will be
    considered missing.
    """
    precursors = repo.obsstore.precursors
    stack = [nodeid]
    seen = set(stack)

    while stack:
        current = stack.pop()

        for markdata in precursors.get(current, ()):
            candidate = markdata[0]

            # basic cycle protection
            if candidate in seen:
                continue
            seen.add(candidate)

            if candidate in repo:
                # locally known and visible: closest predecessor found
                yield candidate
            else:
                # unknown or filtered: keep walking further back
                stack.append(candidate)
97 97
def allprecursors(obsstore, nodes, ignoreflags=0):
    """Yield node for every precursors of <nodes>.

    Some precursors may be unknown locally.

    This is a linear yield unsuited to detecting folded changesets. It includes
    initial nodes too."""

    pending = set(nodes)
    emitted = set(pending)
    while pending:
        node = pending.pop()
        yield node
        for mark in obsstore.precursors.get(node, ()):
            # ignore marker flagged with specified flag
            if mark[2] & ignoreflags:
                continue
            prec = mark[0]
            if prec not in emitted:
                emitted.add(prec)
                pending.add(prec)
119 119
def allsuccessors(obsstore, nodes, ignoreflags=0):
    """Yield node for every successor of <nodes>.

    Some successors may be unknown locally.

    This is a linear yield unsuited to detecting split changesets. It includes
    initial nodes too."""
    pending = set(nodes)
    emitted = set(pending)
    while pending:
        node = pending.pop()
        yield node
        for mark in obsstore.successors.get(node, ()):
            # ignore marker flagged with specified flag
            if mark[2] & ignoreflags:
                continue
            for successor in mark[1]:
                if successor not in emitted:
                    emitted.add(successor)
                    pending.add(successor)
140 140
141 141 def _filterprunes(markers):
142 142 """return a set with no prune markers"""
143 143 return set(m for m in markers if m[1])
144 144
def exclusivemarkers(repo, nodes):
    """set of markers relevant to "nodes" but no other locally-known nodes

    This function compute the set of markers "exclusive" to a locally-known
    node. This means we walk the markers starting from <nodes> until we reach a
    locally-known precursors outside of <nodes>. Element of <nodes> with
    locally-known successors outside of <nodes> are ignored (since their
    precursors markers are also relevant to these successors).

    For example:

        # (A0 rewritten as A1)
        #
        # A0 <-1- A1 # Marker "1" is exclusive to A1

    or

        # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
        #
        # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1

    or

        # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
        #
        #          <-2- A1 # Marker "2" is exclusive to A0,A1
        #        /
        # <-1- A0
        #        \
        #          <-3- A2 # Marker "3" is exclusive to A0,A2
        #
        # in addition:
        #
        #  Markers "2,3" are exclusive to A1,A2
        #  Markers "1,2,3" are exclusive to A0,A1,A2

    See test/test-obsolete-bundle-strip.t for more examples.

    An example usage is strip. When stripping a changeset, we also want to
    strip the markers exclusive to this changeset. Otherwise we would have
    "dangling" obsolescence markers from its precursors: Obsolescence markers
    marking a node as obsolete without any successors available locally.

    As for relevant markers, the prune markers for children will be followed.
    Of course, they will only be followed if the pruned children is
    locally-known. Since the prune markers are relevant to the pruned node.
    However, while prune markers are considered relevant to the parent of the
    pruned changesets, prune markers for locally-known changeset (with no
    successors) are considered exclusive to the pruned nodes. This allows
    to strip the prune markers (with the rest of the exclusive chain) alongside
    the pruned changesets.
    """
    # running on a filtered repository would be dangerous as markers could be
    # reported as exclusive when they are relevant for other filtered nodes.
    unfi = repo.unfiltered()

    # shortcut to various useful item
    nm = unfi.changelog.nodemap
    precursorsmarkers = unfi.obsstore.precursors
    successormarkers = unfi.obsstore.successors
    childrenmarkers = unfi.obsstore.children

    # exclusive markers (return of the function)
    exclmarkers = set()
    # we need fast membership testing
    nodes = set(nodes)
    # looking for head in the obshistory
    #
    # XXX we are ignoring all issues in regard with cycle for now.
    stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
    stack.sort()
    # nodes already stacked
    seennodes = set(stack)
    while stack:
        current = stack.pop()
        # fetch precursors markers
        markers = list(precursorsmarkers.get(current, ()))
        # extend the list with prune markers
        for mark in successormarkers.get(current, ()):
            if not mark[1]:
                markers.append(mark)
        # and markers from children (looking for prune)
        for mark in childrenmarkers.get(current, ()):
            if not mark[1]:
                markers.append(mark)
        # traverse the markers
        for mark in markers:
            if mark in exclmarkers:
                # markers already selected
                continue

            # If the markers is about the current node, select it
            #
            # (this delay the addition of markers from children)
            if mark[1] or mark[0] == current:
                exclmarkers.add(mark)

            # should we keep traversing through the precursors?
            prec = mark[0]

            # nodes in the stack or already processed
            if prec in seennodes:
                continue

            # is this a locally known node ?
            known = prec in nm
            # if locally-known and not in the <nodes> set the traversal
            # stop here.
            if known and prec not in nodes:
                continue

            # do not keep going if there are unselected markers pointing to this
            # nodes. If we end up traversing these unselected markers later the
            # node will be taken care of at that point.
            precmarkers = _filterprunes(successormarkers.get(prec))
            if precmarkers.issubset(exclmarkers):
                seennodes.add(prec)
                stack.append(prec)

    return exclmarkers
265 265
def foreground(repo, nodes):
    """return all nodes in the "foreground" of other node

    The foreground of a revision is anything reachable using parent -> children
    or precursor -> successor relation. It is very similar to "descendant" but
    augmented with obsolescence information.

    Beware that possible obsolescence cycle may result if complex situation.
    """
    repo = repo.unfiltered()
    foreground = set(repo.set('%ln::', nodes))
    if repo.obsstore:
        # We only need this complicated logic if there is obsolescence
        # XXX will probably deserve an optimised revset.
        nm = repo.changelog.nodemap
        plen = -1
        # compute the whole set of successors or descendants
        # (fixpoint: alternate "add successors" and "add descendants" steps
        # until the set stops growing)
        while len(foreground) != plen:
            plen = len(foreground)
            succs = set(c.node() for c in foreground)
            mutable = [c.node() for c in foreground if c.mutable()]
            succs.update(allsuccessors(repo.obsstore, mutable))
            # only locally-known successors can seed the next revset query
            known = (n for n in succs if n in nm)
            foreground = set(repo.set('%ln::', known))
    return set(c.node() for c in foreground)
291 291
def getobsoleted(repo, tr):
    """return the set of pre-existing revisions obsoleted by a transaction"""
    # bind the lookups once; use the unfiltered changelog so obsolete
    # revisions can still be resolved
    torev = repo.unfiltered().changelog.nodemap.get
    phase = repo._phasecache.phase
    succsmarkers = repo.obsstore.successors.get
    public = phases.public
    addedmarkers = tr.changes.get('obsmarkers')
    addedrevs = tr.changes.get('revs')
    # revisions added by the transaction itself are not "pre-existing"
    seenrevs = set(addedrevs)
    obsoleted = set()
    for mark in addedmarkers:
        node = mark[0]
        rev = torev(node)
        if rev is None or rev in seenrevs:
            # locally unknown, added by this transaction, or already handled
            continue
        seenrevs.add(rev)
        if phase(repo, rev) == public:
            # public changesets cannot become obsolete
            continue
        if set(succsmarkers(node)).issubset(addedmarkers):
            # every marker obsoleting this node comes from this transaction
            obsoleted.add(rev)
    return obsoleted
313 313
314 def successorssets(repo, initialnode, cache=None):
314 def successorssets(repo, initialnode, closest=False, cache=None):
315 315 """Return set of all latest successors of initial nodes
316 316
317 317 The successors set of a changeset A are the group of revisions that succeed
318 318 A. It succeeds A as a consistent whole, each revision being only a partial
319 replacement. The successors set contains non-obsolete changesets only.
319 replacement. By default, the successors set contains non-obsolete
320 changesets only, walking the obsolescence graph until reaching a leaf. If
 321 'closest' is set to True, the closest successors-sets are returned (the
322 obsolescence walk stops on known changesets).
320 323
321 324 This function returns the full list of successor sets which is why it
322 325 returns a list of tuples and not just a single tuple. Each tuple is a valid
323 326 successors set. Note that (A,) may be a valid successors set for changeset A
324 327 (see below).
325 328
326 329 In most cases, a changeset A will have a single element (e.g. the changeset
327 330 A is replaced by A') in its successors set. Though, it is also common for a
328 331 changeset A to have no elements in its successor set (e.g. the changeset
329 332 has been pruned). Therefore, the returned list of successors sets will be
330 333 [(A',)] or [], respectively.
331 334
332 335 When a changeset A is split into A' and B', however, it will result in a
333 336 successors set containing more than a single element, i.e. [(A',B')].
334 337 Divergent changesets will result in multiple successors sets, i.e. [(A',),
335 338 (A'')].
336 339
337 340 If a changeset A is not obsolete, then it will conceptually have no
338 341 successors set. To distinguish this from a pruned changeset, the successor
339 342 set will contain itself only, i.e. [(A,)].
340 343
341 344 Finally, final successors unknown locally are considered to be pruned
342 345 (pruned: obsoleted without any successors). (Final: successors not affected
343 346 by markers).
344 347
 348 The 'closest' mode respects the repoview filtering. For example, without
 349 filter it will stop at the first locally known changeset; with the 'visible'
 350 filter it will stop on visible changesets.
351
345 352 The optional `cache` parameter is a dictionary that may contains
346 353 precomputed successors sets. It is meant to reuse the computation of a
347 354 previous call to `successorssets` when multiple calls are made at the same
348 355 time. The cache dictionary is updated in place. The caller is responsible
349 356 for its life span. Code that makes multiple calls to `successorssets`
350 357 *should* use this cache mechanism or risk a performance hit.
358
 359 Since results are different depending on the 'closest' mode, the same cache
 360 cannot be reused for both modes.
351 361 """
352 362
353 363 succmarkers = repo.obsstore.successors
354 364
355 365 # Stack of nodes we search successors sets for
356 366 toproceed = [initialnode]
357 367 # set version of above list for fast loop detection
358 368 # element added to "toproceed" must be added here
359 369 stackedset = set(toproceed)
360 370 if cache is None:
361 371 cache = {}
362 372
363 373 # This while loop is the flattened version of a recursive search for
364 374 # successors sets
365 375 #
366 376 # def successorssets(x):
367 377 # successors = directsuccessors(x)
368 378 # ss = [[]]
369 379 # for succ in directsuccessors(x):
370 380 # # product as in itertools cartesian product
371 381 # ss = product(ss, successorssets(succ))
372 382 # return ss
373 383 #
374 384 # But we can not use plain recursive calls here:
375 385 # - that would blow the python call stack
376 386 # - obsolescence markers may have cycles, we need to handle them.
377 387 #
378 388 # The `toproceed` list act as our call stack. Every node we search
379 389 # successors set for are stacked there.
380 390 #
381 391 # The `stackedset` is set version of this stack used to check if a node is
382 392 # already stacked. This check is used to detect cycles and prevent infinite
383 393 # loop.
384 394 #
385 395 # successors set of all nodes are stored in the `cache` dictionary.
386 396 #
387 397 # After this while loop ends we use the cache to return the successors sets
388 398 # for the node requested by the caller.
389 399 while toproceed:
390 400 # Every iteration tries to compute the successors sets of the topmost
391 401 # node of the stack: CURRENT.
392 402 #
393 403 # There are four possible outcomes:
394 404 #
395 405 # 1) We already know the successors sets of CURRENT:
396 406 # -> mission accomplished, pop it from the stack.
397 # 2) Node is not obsolete:
407 # 2) Stop the walk:
408 # default case: Node is not obsolete
409 # closest case: Node is known at this repo filter level
398 410 # -> the node is its own successors sets. Add it to the cache.
399 411 # 3) We do not know successors set of direct successors of CURRENT:
400 412 # -> We add those successors to the stack.
401 413 # 4) We know successors sets of all direct successors of CURRENT:
402 414 # -> We can compute CURRENT successors set and add it to the
403 415 # cache.
404 416 #
405 417 current = toproceed[-1]
418
419 # case 2 condition is a bit hairy because of closest,
420 # we compute it on its own
421 case2condition = ((current not in succmarkers)
422 or (closest and current != initialnode
423 and current in repo))
424
406 425 if current in cache:
407 426 # case (1): We already know the successors sets
408 427 stackedset.remove(toproceed.pop())
409 elif current not in succmarkers:
410 # case (2): The node is not obsolete.
428 elif case2condition:
429 # case (2): end of walk.
411 430 if current in repo:
412 # We have a valid last successors.
 431 # We have a valid successors set.
413 432 cache[current] = [(current,)]
414 433 else:
415 434 # Final obsolete version is unknown locally.
416 435 # Do not count that as a valid successors
417 436 cache[current] = []
418 437 else:
419 438 # cases (3) and (4)
420 439 #
421 440 # We proceed in two phases. Phase 1 aims to distinguish case (3)
422 441 # from case (4):
423 442 #
424 443 # For each direct successors of CURRENT, we check whether its
425 444 # successors sets are known. If they are not, we stack the
426 445 # unknown node and proceed to the next iteration of the while
427 446 # loop. (case 3)
428 447 #
429 448 # During this step, we may detect obsolescence cycles: a node
430 449 # with unknown successors sets but already in the call stack.
431 450 # In such a situation, we arbitrary set the successors sets of
432 451 # the node to nothing (node pruned) to break the cycle.
433 452 #
434 453 # If no break was encountered we proceed to phase 2.
435 454 #
436 455 # Phase 2 computes successors sets of CURRENT (case 4); see details
437 456 # in phase 2 itself.
438 457 #
439 458 # Note the two levels of iteration in each phase.
440 459 # - The first one handles obsolescence markers using CURRENT as
441 460 # precursor (successors markers of CURRENT).
442 461 #
443 462 # Having multiple entry here means divergence.
444 463 #
445 464 # - The second one handles successors defined in each marker.
446 465 #
447 466 # Having none means pruned node, multiple successors means split,
448 467 # single successors are standard replacement.
449 468 #
450 469 for mark in sorted(succmarkers[current]):
451 470 for suc in mark[1]:
452 471 if suc not in cache:
453 472 if suc in stackedset:
454 473 # cycle breaking
455 474 cache[suc] = []
456 475 else:
457 476 # case (3) If we have not computed successors sets
458 477 # of one of those successors we add it to the
459 478 # `toproceed` stack and stop all work for this
460 479 # iteration.
461 480 toproceed.append(suc)
462 481 stackedset.add(suc)
463 482 break
464 483 else:
465 484 continue
466 485 break
467 486 else:
468 487 # case (4): we know all successors sets of all direct
469 488 # successors
470 489 #
471 490 # Successors set contributed by each marker depends on the
472 491 # successors sets of all its "successors" node.
473 492 #
474 493 # Each different marker is a divergence in the obsolescence
475 494 # history. It contributes successors sets distinct from other
476 495 # markers.
477 496 #
478 497 # Within a marker, a successor may have divergent successors
479 498 # sets. In such a case, the marker will contribute multiple
480 499 # divergent successors sets. If multiple successors have
481 500 # divergent successors sets, a Cartesian product is used.
482 501 #
483 502 # At the end we post-process successors sets to remove
484 503 # duplicated entry and successors set that are strict subset of
485 504 # another one.
486 505 succssets = []
487 506 for mark in sorted(succmarkers[current]):
488 507 # successors sets contributed by this marker
489 508 markss = [[]]
490 509 for suc in mark[1]:
491 510 # cardinal product with previous successors
492 511 productresult = []
493 512 for prefix in markss:
494 513 for suffix in cache[suc]:
495 514 newss = list(prefix)
496 515 for part in suffix:
497 516 # do not duplicated entry in successors set
498 517 # first entry wins.
499 518 if part not in newss:
500 519 newss.append(part)
501 520 productresult.append(newss)
502 521 markss = productresult
503 522 succssets.extend(markss)
504 523 # remove duplicated and subset
505 524 seen = []
506 525 final = []
507 526 candidate = sorted(((set(s), s) for s in succssets if s),
508 527 key=lambda x: len(x[1]), reverse=True)
509 528 for setversion, listversion in candidate:
510 529 for seenset in seen:
511 530 if setversion.issubset(seenset):
512 531 break
513 532 else:
514 533 final.append(listversion)
515 534 seen.append(setversion)
516 535 final.reverse() # put small successors set first
517 536 cache[current] = final
518 537 return cache[initialnode]
@@ -1,381 +1,381
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 add
4 4 addremove
5 5 annotate
6 6 archive
7 7 backout
8 8 bisect
9 9 bookmarks
10 10 branch
11 11 branches
12 12 bundle
13 13 cat
14 14 clone
15 15 commit
16 16 config
17 17 copy
18 18 diff
19 19 export
20 20 files
21 21 forget
22 22 graft
23 23 grep
24 24 heads
25 25 help
26 26 identify
27 27 import
28 28 incoming
29 29 init
30 30 locate
31 31 log
32 32 manifest
33 33 merge
34 34 outgoing
35 35 parents
36 36 paths
37 37 phase
38 38 pull
39 39 push
40 40 recover
41 41 remove
42 42 rename
43 43 resolve
44 44 revert
45 45 rollback
46 46 root
47 47 serve
48 48 status
49 49 summary
50 50 tag
51 51 tags
52 52 tip
53 53 unbundle
54 54 update
55 55 verify
56 56 version
57 57
58 58 Show all commands that start with "a"
59 59 $ hg debugcomplete a
60 60 add
61 61 addremove
62 62 annotate
63 63 archive
64 64
65 65 Do not show debug commands if there are other candidates
66 66 $ hg debugcomplete d
67 67 diff
68 68
69 69 Show debug commands if there are no other candidates
70 70 $ hg debugcomplete debug
71 71 debugancestor
72 72 debugapplystreamclonebundle
73 73 debugbuilddag
74 74 debugbundle
75 75 debugcheckstate
76 76 debugcolor
77 77 debugcommands
78 78 debugcomplete
79 79 debugconfig
80 80 debugcreatestreamclonebundle
81 81 debugdag
82 82 debugdata
83 83 debugdate
84 84 debugdeltachain
85 85 debugdirstate
86 86 debugdiscovery
87 87 debugextensions
88 88 debugfileset
89 89 debugfsinfo
90 90 debuggetbundle
91 91 debugignore
92 92 debugindex
93 93 debugindexdot
94 94 debuginstall
95 95 debugknown
96 96 debuglabelcomplete
97 97 debuglocks
98 98 debugmergestate
99 99 debugnamecomplete
100 100 debugobsolete
101 101 debugpathcomplete
102 102 debugpickmergetool
103 103 debugpushkey
104 104 debugpvec
105 105 debugrebuilddirstate
106 106 debugrebuildfncache
107 107 debugrename
108 108 debugrevlog
109 109 debugrevspec
110 110 debugsetparents
111 111 debugsub
112 112 debugsuccessorssets
113 113 debugtemplate
114 114 debugupdatecaches
115 115 debugupgraderepo
116 116 debugwalk
117 117 debugwireargs
118 118
119 119 Do not show the alias of a debug command if there are other candidates
120 120 (this should hide rawcommit)
121 121 $ hg debugcomplete r
122 122 recover
123 123 remove
124 124 rename
125 125 resolve
126 126 revert
127 127 rollback
128 128 root
129 129 Show the alias of a debug command if there are no other candidates
130 130 $ hg debugcomplete rawc
131 131
132 132
133 133 Show the global options
134 134 $ hg debugcomplete --options | sort
135 135 --color
136 136 --config
137 137 --cwd
138 138 --debug
139 139 --debugger
140 140 --encoding
141 141 --encodingmode
142 142 --help
143 143 --hidden
144 144 --noninteractive
145 145 --pager
146 146 --profile
147 147 --quiet
148 148 --repository
149 149 --time
150 150 --traceback
151 151 --verbose
152 152 --version
153 153 -R
154 154 -h
155 155 -q
156 156 -v
157 157 -y
158 158
159 159 Show the options for the "serve" command
160 160 $ hg debugcomplete --options serve | sort
161 161 --accesslog
162 162 --address
163 163 --certificate
164 164 --cmdserver
165 165 --color
166 166 --config
167 167 --cwd
168 168 --daemon
169 169 --daemon-postexec
170 170 --debug
171 171 --debugger
172 172 --encoding
173 173 --encodingmode
174 174 --errorlog
175 175 --help
176 176 --hidden
177 177 --ipv6
178 178 --name
179 179 --noninteractive
180 180 --pager
181 181 --pid-file
182 182 --port
183 183 --prefix
184 184 --profile
185 185 --quiet
186 186 --repository
187 187 --stdio
188 188 --style
189 189 --subrepos
190 190 --templates
191 191 --time
192 192 --traceback
193 193 --verbose
194 194 --version
195 195 --web-conf
196 196 -6
197 197 -A
198 198 -E
199 199 -R
200 200 -S
201 201 -a
202 202 -d
203 203 -h
204 204 -n
205 205 -p
206 206 -q
207 207 -t
208 208 -v
209 209 -y
210 210
211 211 Show an error if we use --options with an ambiguous abbreviation
212 212 $ hg debugcomplete --options s
213 213 hg: command 's' is ambiguous:
214 214 serve showconfig status summary
215 215 [255]
216 216
217 217 Show all commands + options
218 218 $ hg debugcommands
219 219 add: include, exclude, subrepos, dry-run
220 220 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
221 221 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
222 222 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
223 223 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
224 224 export: output, switch-parent, rev, text, git, binary, nodates
225 225 forget: include, exclude
226 226 init: ssh, remotecmd, insecure
227 227 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
228 228 merge: force, rev, preview, tool
229 229 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
230 230 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
231 231 remove: after, force, subrepos, include, exclude
232 232 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
233 233 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
234 234 summary: remote
235 235 update: clean, check, merge, date, rev, tool
236 236 addremove: similarity, subrepos, include, exclude, dry-run
237 237 archive: no-decode, prefix, rev, type, subrepos, include, exclude
238 238 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
239 239 bisect: reset, good, bad, skip, extend, command, noupdate
240 240 bookmarks: force, rev, delete, rename, inactive, template
241 241 branch: force, clean
242 242 branches: active, closed, template
243 243 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
244 244 cat: output, rev, decode, include, exclude, template
245 245 config: untrusted, edit, local, global, template
246 246 copy: after, force, include, exclude, dry-run
247 247 debugancestor:
248 248 debugapplystreamclonebundle:
249 249 debugbuilddag: mergeable-file, overwritten-file, new-file
250 250 debugbundle: all, part-type, spec
251 251 debugcheckstate:
252 252 debugcolor: style
253 253 debugcommands:
254 254 debugcomplete: options
255 255 debugcreatestreamclonebundle:
256 256 debugdag: tags, branches, dots, spaces
257 257 debugdata: changelog, manifest, dir
258 258 debugdate: extended
259 259 debugdeltachain: changelog, manifest, dir, template
260 260 debugdirstate: nodates, datesort
261 261 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
262 262 debugextensions: template
263 263 debugfileset: rev
264 264 debugfsinfo:
265 265 debuggetbundle: head, common, type
266 266 debugignore:
267 267 debugindex: changelog, manifest, dir, format
268 268 debugindexdot: changelog, manifest, dir
269 269 debuginstall: template
270 270 debugknown:
271 271 debuglabelcomplete:
272 272 debuglocks: force-lock, force-wlock
273 273 debugmergestate:
274 274 debugnamecomplete:
275 275 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
276 276 debugpathcomplete: full, normal, added, removed
277 277 debugpickmergetool: rev, changedelete, include, exclude, tool
278 278 debugpushkey:
279 279 debugpvec:
280 280 debugrebuilddirstate: rev, minimal
281 281 debugrebuildfncache:
282 282 debugrename: rev
283 283 debugrevlog: changelog, manifest, dir, dump
284 284 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
285 285 debugsetparents:
286 286 debugsub: rev
287 debugsuccessorssets:
287 debugsuccessorssets: closest
288 288 debugtemplate: rev, define
289 289 debugupdatecaches:
290 290 debugupgraderepo: optimize, run
291 291 debugwalk: include, exclude
292 292 debugwireargs: three, four, five, ssh, remotecmd, insecure
293 293 files: rev, print0, include, exclude, template, subrepos
294 294 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
295 295 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
296 296 heads: rev, topo, active, closed, style, template
297 297 help: extension, command, keyword, system
298 298 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
299 299 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
300 300 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
301 301 locate: rev, print0, fullpath, include, exclude
302 302 manifest: rev, all, template
303 303 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
304 304 parents: rev, style, template
305 305 paths: template
306 306 phase: public, draft, secret, force, rev
307 307 recover:
308 308 rename: after, force, include, exclude, dry-run
309 309 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
310 310 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
311 311 rollback: dry-run, force
312 312 root:
313 313 tag: force, local, rev, remove, edit, message, date, user
314 314 tags: template
315 315 tip: patch, git, style, template
316 316 unbundle: update
317 317 verify:
318 318 version: template
319 319
320 320 $ hg init a
321 321 $ cd a
322 322 $ echo fee > fee
323 323 $ hg ci -q -Amfee
324 324 $ hg tag fee
325 325 $ mkdir fie
326 326 $ echo dead > fie/dead
327 327 $ echo live > fie/live
328 328 $ hg bookmark fo
329 329 $ hg branch -q fie
330 330 $ hg ci -q -Amfie
331 331 $ echo fo > fo
332 332 $ hg branch -qf default
333 333 $ hg ci -q -Amfo
334 334 $ echo Fum > Fum
335 335 $ hg ci -q -AmFum
336 336 $ hg bookmark Fum
337 337
338 338 Test debugpathcomplete
339 339
340 340 $ hg debugpathcomplete f
341 341 fee
342 342 fie
343 343 fo
344 344 $ hg debugpathcomplete -f f
345 345 fee
346 346 fie/dead
347 347 fie/live
348 348 fo
349 349
350 350 $ hg rm Fum
351 351 $ hg debugpathcomplete -r F
352 352 Fum
353 353
354 354 Test debugnamecomplete
355 355
356 356 $ hg debugnamecomplete
357 357 Fum
358 358 default
359 359 fee
360 360 fie
361 361 fo
362 362 tip
363 363 $ hg debugnamecomplete f
364 364 fee
365 365 fie
366 366 fo
367 367
368 368 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
369 369 used for completions in some shells.
370 370
371 371 $ hg debuglabelcomplete
372 372 Fum
373 373 default
374 374 fee
375 375 fie
376 376 fo
377 377 tip
378 378 $ hg debuglabelcomplete f
379 379 fee
380 380 fie
381 381 fo
@@ -1,437 +1,619
1 1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2 2
3 3 This is the most complex troubles from far so we isolate it in a dedicated
4 4 file.
5 5
6 6 Enable obsolete
7 7
8 8 $ cat >> $HGRCPATH << EOF
9 9 > [ui]
10 10 > logtemplate = {rev}:{node|short} {desc}\n
11 11 > [experimental]
12 12 > evolution=createmarkers
13 13 > [alias]
14 14 > debugobsolete = debugobsolete -d '0 0'
15 15 > [phases]
16 16 > publish=False
17 17 > EOF
18 18
19 19
20 20 $ mkcommit() {
21 21 > echo "$1" > "$1"
22 22 > hg add "$1"
23 23 > hg ci -m "$1"
24 24 > }
25 25 $ getid() {
26 26 > hg log --hidden -r "desc('$1')" -T '{node}\n'
27 27 > }
28 28
29 29 setup repo
30 30
31 31 $ hg init reference
32 32 $ cd reference
33 33 $ mkcommit base
34 34 $ mkcommit A_0
35 35 $ hg up 0
36 36 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
37 37 $ mkcommit A_1
38 38 created new head
39 39 $ hg up 0
40 40 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
41 41 $ mkcommit A_2
42 42 created new head
43 43 $ hg up 0
44 44 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
45 45 $ cd ..
46 46
47 47
48 48 $ newcase() {
49 49 > hg clone -u 0 -q reference $1
50 50 > cd $1
51 51 > }
52 52
53 53 direct divergence
54 54 -----------------
55 55
56 56 A_1 have two direct and divergent successors A_1 and A_1
57 57
58 58 $ newcase direct
59 59 $ hg debugobsolete `getid A_0` `getid A_1`
60 60 $ hg debugobsolete `getid A_0` `getid A_2`
61 61 $ hg log -G --hidden
62 62 o 3:392fd25390da A_2
63 63 |
64 64 | o 2:82623d38b9ba A_1
65 65 |/
66 66 | x 1:007dc284c1f8 A_0
67 67 |/
68 68 @ 0:d20a80d4def3 base
69 69
70 70 $ hg debugsuccessorssets --hidden 'all()'
71 71 d20a80d4def3
72 72 d20a80d4def3
73 73 007dc284c1f8
74 74 82623d38b9ba
75 75 392fd25390da
76 76 82623d38b9ba
77 77 82623d38b9ba
78 78 392fd25390da
79 79 392fd25390da
80 80 $ hg log -r 'divergent()'
81 81 2:82623d38b9ba A_1
82 82 3:392fd25390da A_2
83 $ hg debugsuccessorssets 'all()' --closest
84 d20a80d4def3
85 d20a80d4def3
86 82623d38b9ba
87 82623d38b9ba
88 392fd25390da
89 392fd25390da
90 $ hg debugsuccessorssets 'all()' --closest --hidden
91 d20a80d4def3
92 d20a80d4def3
93 007dc284c1f8
94 82623d38b9ba
95 392fd25390da
96 82623d38b9ba
97 82623d38b9ba
98 392fd25390da
99 392fd25390da
83 100
84 101 check that mercurial refuse to push
85 102
86 103 $ hg init ../other
87 104 $ hg push ../other
88 105 pushing to ../other
89 106 searching for changes
90 107 abort: push includes divergent changeset: 392fd25390da!
91 108 [255]
92 109
93 110 $ cd ..
94 111
95 112
96 113 indirect divergence with known changeset
97 114 -------------------------------------------
98 115
99 116 $ newcase indirect_known
100 117 $ hg debugobsolete `getid A_0` `getid A_1`
101 118 $ hg debugobsolete `getid A_0` `getid A_2`
102 119 $ mkcommit A_3
103 120 created new head
104 121 $ hg debugobsolete `getid A_2` `getid A_3`
105 122 $ hg log -G --hidden
106 123 @ 4:01f36c5a8fda A_3
107 124 |
108 125 | x 3:392fd25390da A_2
109 126 |/
110 127 | o 2:82623d38b9ba A_1
111 128 |/
112 129 | x 1:007dc284c1f8 A_0
113 130 |/
114 131 o 0:d20a80d4def3 base
115 132
116 133 $ hg debugsuccessorssets --hidden 'all()'
117 134 d20a80d4def3
118 135 d20a80d4def3
119 136 007dc284c1f8
120 137 82623d38b9ba
121 138 01f36c5a8fda
122 139 82623d38b9ba
123 140 82623d38b9ba
124 141 392fd25390da
125 142 01f36c5a8fda
126 143 01f36c5a8fda
127 144 01f36c5a8fda
128 145 $ hg log -r 'divergent()'
129 146 2:82623d38b9ba A_1
130 147 4:01f36c5a8fda A_3
148 $ hg debugsuccessorssets 'all()' --closest
149 d20a80d4def3
150 d20a80d4def3
151 82623d38b9ba
152 82623d38b9ba
153 01f36c5a8fda
154 01f36c5a8fda
155 $ hg debugsuccessorssets 'all()' --closest --hidden
156 d20a80d4def3
157 d20a80d4def3
158 007dc284c1f8
159 82623d38b9ba
160 392fd25390da
161 82623d38b9ba
162 82623d38b9ba
163 392fd25390da
164 392fd25390da
165 01f36c5a8fda
166 01f36c5a8fda
131 167 $ cd ..
132 168
133 169
134 170 indirect divergence with known changeset
135 171 -------------------------------------------
136 172
137 173 $ newcase indirect_unknown
138 174 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
139 175 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
140 176 $ hg debugobsolete `getid A_0` `getid A_2`
141 177 $ hg log -G --hidden
142 178 o 3:392fd25390da A_2
143 179 |
144 180 | o 2:82623d38b9ba A_1
145 181 |/
146 182 | x 1:007dc284c1f8 A_0
147 183 |/
148 184 @ 0:d20a80d4def3 base
149 185
150 186 $ hg debugsuccessorssets --hidden 'all()'
151 187 d20a80d4def3
152 188 d20a80d4def3
153 189 007dc284c1f8
154 190 82623d38b9ba
155 191 392fd25390da
156 192 82623d38b9ba
157 193 82623d38b9ba
158 194 392fd25390da
159 195 392fd25390da
160 196 $ hg log -r 'divergent()'
161 197 2:82623d38b9ba A_1
162 198 3:392fd25390da A_2
199 $ hg debugsuccessorssets 'all()' --closest
200 d20a80d4def3
201 d20a80d4def3
202 82623d38b9ba
203 82623d38b9ba
204 392fd25390da
205 392fd25390da
206 $ hg debugsuccessorssets 'all()' --closest --hidden
207 d20a80d4def3
208 d20a80d4def3
209 007dc284c1f8
210 82623d38b9ba
211 392fd25390da
212 82623d38b9ba
213 82623d38b9ba
214 392fd25390da
215 392fd25390da
163 216 $ cd ..
164 217
165 218 do not take unknown node in account if they are final
166 219 -----------------------------------------------------
167 220
168 221 $ newcase final-unknown
169 222 $ hg debugobsolete `getid A_0` `getid A_1`
170 223 $ hg debugobsolete `getid A_1` `getid A_2`
171 224 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
172 225 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
173 226 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
174 227
175 228 $ hg debugsuccessorssets --hidden 'desc('A_0')'
176 229 007dc284c1f8
177 230 392fd25390da
231 $ hg debugsuccessorssets 'desc('A_0')' --closest
232 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
233 007dc284c1f8
234 82623d38b9ba
178 235
179 236 $ cd ..
180 237
181 238 divergence that converge again is not divergence anymore
182 239 -----------------------------------------------------
183 240
184 241 $ newcase converged_divergence
185 242 $ hg debugobsolete `getid A_0` `getid A_1`
186 243 $ hg debugobsolete `getid A_0` `getid A_2`
187 244 $ mkcommit A_3
188 245 created new head
189 246 $ hg debugobsolete `getid A_1` `getid A_3`
190 247 $ hg debugobsolete `getid A_2` `getid A_3`
191 248 $ hg log -G --hidden
192 249 @ 4:01f36c5a8fda A_3
193 250 |
194 251 | x 3:392fd25390da A_2
195 252 |/
196 253 | x 2:82623d38b9ba A_1
197 254 |/
198 255 | x 1:007dc284c1f8 A_0
199 256 |/
200 257 o 0:d20a80d4def3 base
201 258
202 259 $ hg debugsuccessorssets --hidden 'all()'
203 260 d20a80d4def3
204 261 d20a80d4def3
205 262 007dc284c1f8
206 263 01f36c5a8fda
207 264 82623d38b9ba
208 265 01f36c5a8fda
209 266 392fd25390da
210 267 01f36c5a8fda
211 268 01f36c5a8fda
212 269 01f36c5a8fda
213 270 $ hg log -r 'divergent()'
271 $ hg debugsuccessorssets 'all()' --closest
272 d20a80d4def3
273 d20a80d4def3
274 01f36c5a8fda
275 01f36c5a8fda
276 $ hg debugsuccessorssets 'all()' --closest --hidden
277 d20a80d4def3
278 d20a80d4def3
279 007dc284c1f8
280 82623d38b9ba
281 392fd25390da
282 82623d38b9ba
283 82623d38b9ba
284 392fd25390da
285 392fd25390da
286 01f36c5a8fda
287 01f36c5a8fda
214 288 $ cd ..
215 289
216 290 split is not divergences
217 291 -----------------------------
218 292
219 293 $ newcase split
220 294 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
221 295 $ hg log -G --hidden
222 296 o 3:392fd25390da A_2
223 297 |
224 298 | o 2:82623d38b9ba A_1
225 299 |/
226 300 | x 1:007dc284c1f8 A_0
227 301 |/
228 302 @ 0:d20a80d4def3 base
229 303
230 304 $ hg debugsuccessorssets --hidden 'all()'
231 305 d20a80d4def3
232 306 d20a80d4def3
233 307 007dc284c1f8
234 308 82623d38b9ba 392fd25390da
235 309 82623d38b9ba
236 310 82623d38b9ba
237 311 392fd25390da
238 312 392fd25390da
239 313 $ hg log -r 'divergent()'
314 $ hg debugsuccessorssets 'all()' --closest
315 d20a80d4def3
316 d20a80d4def3
317 82623d38b9ba
318 82623d38b9ba
319 392fd25390da
320 392fd25390da
321 $ hg debugsuccessorssets 'all()' --closest --hidden
322 d20a80d4def3
323 d20a80d4def3
324 007dc284c1f8
325 82623d38b9ba 392fd25390da
326 82623d38b9ba
327 82623d38b9ba
328 392fd25390da
329 392fd25390da
240 330
241 331 Even when subsequent rewriting happen
242 332
243 333 $ mkcommit A_3
244 334 created new head
245 335 $ hg debugobsolete `getid A_1` `getid A_3`
246 336 $ hg up 0
247 337 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
248 338 $ mkcommit A_4
249 339 created new head
250 340 $ hg debugobsolete `getid A_2` `getid A_4`
251 341 $ hg up 0
252 342 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
253 343 $ mkcommit A_5
254 344 created new head
255 345 $ hg debugobsolete `getid A_4` `getid A_5`
256 346 $ hg log -G --hidden
257 347 @ 6:e442cfc57690 A_5
258 348 |
259 349 | x 5:6a411f0d7a0a A_4
260 350 |/
261 351 | o 4:01f36c5a8fda A_3
262 352 |/
263 353 | x 3:392fd25390da A_2
264 354 |/
265 355 | x 2:82623d38b9ba A_1
266 356 |/
267 357 | x 1:007dc284c1f8 A_0
268 358 |/
269 359 o 0:d20a80d4def3 base
270 360
271 361 $ hg debugsuccessorssets --hidden 'all()'
272 362 d20a80d4def3
273 363 d20a80d4def3
274 364 007dc284c1f8
275 365 01f36c5a8fda e442cfc57690
276 366 82623d38b9ba
277 367 01f36c5a8fda
278 368 392fd25390da
279 369 e442cfc57690
280 370 01f36c5a8fda
281 371 01f36c5a8fda
282 372 6a411f0d7a0a
283 373 e442cfc57690
284 374 e442cfc57690
285 375 e442cfc57690
376 $ hg debugsuccessorssets 'all()' --closest
377 d20a80d4def3
378 d20a80d4def3
379 01f36c5a8fda
380 01f36c5a8fda
381 e442cfc57690
382 e442cfc57690
383 $ hg debugsuccessorssets 'all()' --closest --hidden
384 d20a80d4def3
385 d20a80d4def3
386 007dc284c1f8
387 82623d38b9ba 392fd25390da
388 82623d38b9ba
389 82623d38b9ba
390 392fd25390da
391 392fd25390da
392 01f36c5a8fda
393 01f36c5a8fda
394 6a411f0d7a0a
395 e442cfc57690
396 e442cfc57690
397 e442cfc57690
286 398 $ hg log -r 'divergent()'
287 399
288 400 Check more complex obsolescence graft (with divergence)
289 401
290 402 $ mkcommit B_0; hg up 0
291 403 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
292 404 $ hg debugobsolete `getid B_0` `getid A_2`
293 405 $ mkcommit A_7; hg up 0
294 406 created new head
295 407 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
296 408 $ mkcommit A_8; hg up 0
297 409 created new head
298 410 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
299 411 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
300 412 $ mkcommit A_9; hg up 0
301 413 created new head
302 414 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
303 415 $ hg debugobsolete `getid A_5` `getid A_9`
304 416 $ hg log -G --hidden
305 417 o 10:bed64f5d2f5a A_9
306 418 |
307 419 | o 9:14608b260df8 A_8
308 420 |/
309 421 | o 8:7ae126973a96 A_7
310 422 |/
311 423 | x 7:3750ebee865d B_0
312 424 | |
313 425 | x 6:e442cfc57690 A_5
314 426 |/
315 427 | x 5:6a411f0d7a0a A_4
316 428 |/
317 429 | o 4:01f36c5a8fda A_3
318 430 |/
319 431 | x 3:392fd25390da A_2
320 432 |/
321 433 | x 2:82623d38b9ba A_1
322 434 |/
323 435 | x 1:007dc284c1f8 A_0
324 436 |/
325 437 @ 0:d20a80d4def3 base
326 438
327 439 $ hg debugsuccessorssets --hidden 'all()'
328 440 d20a80d4def3
329 441 d20a80d4def3
330 442 007dc284c1f8
331 443 01f36c5a8fda bed64f5d2f5a
332 444 01f36c5a8fda 7ae126973a96 14608b260df8
333 445 82623d38b9ba
334 446 01f36c5a8fda
335 447 392fd25390da
336 448 bed64f5d2f5a
337 449 7ae126973a96 14608b260df8
338 450 01f36c5a8fda
339 451 01f36c5a8fda
340 452 6a411f0d7a0a
341 453 bed64f5d2f5a
342 454 7ae126973a96 14608b260df8
343 455 e442cfc57690
344 456 bed64f5d2f5a
345 457 7ae126973a96 14608b260df8
346 458 3750ebee865d
347 459 bed64f5d2f5a
348 460 7ae126973a96 14608b260df8
349 461 7ae126973a96
350 462 7ae126973a96
351 463 14608b260df8
352 464 14608b260df8
353 465 bed64f5d2f5a
354 466 bed64f5d2f5a
467 $ hg debugsuccessorssets 'all()' --closest
468 d20a80d4def3
469 d20a80d4def3
470 01f36c5a8fda
471 01f36c5a8fda
472 7ae126973a96
473 7ae126973a96
474 14608b260df8
475 14608b260df8
476 bed64f5d2f5a
477 bed64f5d2f5a
478 $ hg debugsuccessorssets 'all()' --closest --hidden
479 d20a80d4def3
480 d20a80d4def3
481 007dc284c1f8
482 82623d38b9ba 392fd25390da
483 82623d38b9ba
484 82623d38b9ba
485 392fd25390da
486 392fd25390da
487 01f36c5a8fda
488 01f36c5a8fda
489 6a411f0d7a0a
490 e442cfc57690
491 e442cfc57690
492 e442cfc57690
493 3750ebee865d
494 392fd25390da
495 7ae126973a96
496 7ae126973a96
497 14608b260df8
498 14608b260df8
499 bed64f5d2f5a
500 bed64f5d2f5a
355 501 $ hg log -r 'divergent()'
356 502 4:01f36c5a8fda A_3
357 503 8:7ae126973a96 A_7
358 504 9:14608b260df8 A_8
359 505 10:bed64f5d2f5a A_9
360 506
361 507 fix the divergence
362 508
363 509 $ mkcommit A_A; hg up 0
364 510 created new head
365 511 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
366 512 $ hg debugobsolete `getid A_9` `getid A_A`
367 513 $ hg debugobsolete `getid A_7` `getid A_A`
368 514 $ hg debugobsolete `getid A_8` `getid A_A`
369 515 $ hg log -G --hidden
370 516 o 11:a139f71be9da A_A
371 517 |
372 518 | x 10:bed64f5d2f5a A_9
373 519 |/
374 520 | x 9:14608b260df8 A_8
375 521 |/
376 522 | x 8:7ae126973a96 A_7
377 523 |/
378 524 | x 7:3750ebee865d B_0
379 525 | |
380 526 | x 6:e442cfc57690 A_5
381 527 |/
382 528 | x 5:6a411f0d7a0a A_4
383 529 |/
384 530 | o 4:01f36c5a8fda A_3
385 531 |/
386 532 | x 3:392fd25390da A_2
387 533 |/
388 534 | x 2:82623d38b9ba A_1
389 535 |/
390 536 | x 1:007dc284c1f8 A_0
391 537 |/
392 538 @ 0:d20a80d4def3 base
393 539
394 540 $ hg debugsuccessorssets --hidden 'all()'
395 541 d20a80d4def3
396 542 d20a80d4def3
397 543 007dc284c1f8
398 544 01f36c5a8fda a139f71be9da
399 545 82623d38b9ba
400 546 01f36c5a8fda
401 547 392fd25390da
402 548 a139f71be9da
403 549 01f36c5a8fda
404 550 01f36c5a8fda
405 551 6a411f0d7a0a
406 552 a139f71be9da
407 553 e442cfc57690
408 554 a139f71be9da
409 555 3750ebee865d
410 556 a139f71be9da
411 557 7ae126973a96
412 558 a139f71be9da
413 559 14608b260df8
414 560 a139f71be9da
415 561 bed64f5d2f5a
416 562 a139f71be9da
417 563 a139f71be9da
418 564 a139f71be9da
565 $ hg debugsuccessorssets 'all()' --closest
566 d20a80d4def3
567 d20a80d4def3
568 01f36c5a8fda
569 01f36c5a8fda
570 a139f71be9da
571 a139f71be9da
572 $ hg debugsuccessorssets 'all()' --closest --hidden
573 d20a80d4def3
574 d20a80d4def3
575 007dc284c1f8
576 82623d38b9ba 392fd25390da
577 82623d38b9ba
578 82623d38b9ba
579 392fd25390da
580 392fd25390da
581 01f36c5a8fda
582 01f36c5a8fda
583 6a411f0d7a0a
584 e442cfc57690
585 e442cfc57690
586 e442cfc57690
587 3750ebee865d
588 392fd25390da
589 7ae126973a96
590 a139f71be9da
591 14608b260df8
592 a139f71be9da
593 bed64f5d2f5a
594 a139f71be9da
595 a139f71be9da
596 a139f71be9da
419 597 $ hg log -r 'divergent()'
420 598
421 599 $ cd ..
422 600
423 601
424 602 Subset does not diverge
425 603 ------------------------------
426 604
427 605 Do not report divergent successors-set if it is a subset of another
428 606 successors-set. (report [A,B] not [A] + [A,B])
429 607
430 608 $ newcase subset
431 609 $ hg debugobsolete `getid A_0` `getid A_2`
432 610 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
433 611 $ hg debugsuccessorssets --hidden 'desc('A_0')'
434 612 007dc284c1f8
435 613 82623d38b9ba 392fd25390da
614 $ hg debugsuccessorssets 'desc('A_0')' --closest
615 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
616 007dc284c1f8
617 82623d38b9ba 392fd25390da
436 618
437 619 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now