##// END OF EJS Templates
obsolete: add a function to compute "exclusive-markers" for a set of nodes...
marmoute -
r32626:00a7f7b1 default
parent child Browse files
Show More
@@ -1,2185 +1,2188 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 policy,
51 51 pvec,
52 52 pycompat,
53 53 registrar,
54 54 repair,
55 55 revlog,
56 56 revset,
57 57 revsetlang,
58 58 scmutil,
59 59 setdiscovery,
60 60 simplemerge,
61 61 smartset,
62 62 sslutil,
63 63 streamclone,
64 64 templater,
65 65 treediscovery,
66 66 upgrade,
67 67 util,
68 68 vfs as vfsmod,
69 69 )
70 70
71 71 release = lockmod.release
72 72
73 73 command = registrar.command()
74 74
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions: an explicit revlog index file plus two
    # revisions, or two revisions resolved against the current repo.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
93 93
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle at the given path and replay it into the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
100 100
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a freshly-created repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass; used only for
    # progress reporting and for sizing the mergeable file)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        # Second parse pass: actually create a commit per 'n' element.
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the shared file from
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # Touch this revision's dedicated line so every rev
                    # modifies the file.
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        # Carry over the second parent's nf* files so the
                        # merge keeps them.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                # Translate the parser's parent ids into node ids created
                # earlier in this run.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # Local tag element: remember it; written out after commit.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
252 252
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup 'gen'.

    With 'all' set, every delta chunk of the changelog, the manifest and
    each filelog section is printed; otherwise only the changelog node
    ids are shown.  'indent' prefixes each output line (used when nested
    inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print one section of delta chunks.  Each chunk's implicit
            # delta base is the previously emitted node ('chain'); the
            # section ends when deltachunk returns the {} sentinel.
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # The remaining sections are per-file logs, terminated by an
        # empty header ({}).
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        # Brief mode only works on plain changegroups.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node
290 290
def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
    """display version and markers contained in 'data'"""
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Report the unsupported version and bail out early.
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
        return
    ui.write("%sversion: %s (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter('debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsolete.marker(None, rawmarker))
    fm.end()
311 311
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    # Only bundle2 streams carry parts; anything else is a caller error.
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        parttype = part.type
        if parttype == 'changegroup':
            unbundler = changegroup.getunbundler(
                part.params.get('version', '01'), part, 'UN')
            _debugchangegroup(ui, unbundler, all=all, indent=4, **opts)
        if parttype == 'obsmarkers':
            _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
325 325
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec only reports the bundle's spec string, no content dump.
        if spec:
            spec = exchange.getbundlespec(ui, fh)
            ui.write('%s\n' % spec)
            return
        gen = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
343 343
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Cross-check every dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Reverse direction: every manifest file must be tracked.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Note: the local must not be named 'error' -- that would shadow
        # the imported 'error' module and make error.Abort unreachable.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
371 371
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # Always report the active mode, then dispatch to the requested view.
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
382 382
def _debugdisplaycolor(ui):
    """Print every color label known to the active color mode."""
    # Operate on a private copy: the style table is clobbered below.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems('color'):
            if name.startswith('color.'):
                ui._styles[name] = name[6:]
            elif name.startswith('terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(ui._styles.items(),
                     key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in entries:
        ui.write(('%s\n') % colorname, label=label)
400 400
def _debugdisplaystyle(ui):
    """Print each configured style label with its effects, aligned."""
    ui.write(_('available style:\n'))
    # Align the effect lists on the longest label.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            ui.write(': ')
            ui.write(padding)
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
412 412
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # Generate the v1 stream bundle and spool its chunks to the file.
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)
    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
424 424
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog file: emit its DAG; listed revs become rN labels.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield 'n' (node) events with non-null parent lists, plus an
            # 'l' (label) event for each requested rev.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No file given: walk the repo changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its tag names for 'l' events below.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event on branch changes.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
487 487
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the positional FILE argument is really the revision.
    if any(opts.get(o) for o in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
502 502
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the extra, more forgiving date formats.
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
518 518
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # Classify how this revision's delta was stored and total up the
        # compressed size of its whole delta chain.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # Compare the delta base field (e[3]) against the parent
            # fields (e[5]/e[6]) to name the delta's role.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta a revision either starts its own chain
            # ('base') or deltas against the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Chains are numbered in the order their bases are first seen.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: this rev is its own base.
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
619 619
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Each dirstate entry is (state, mode, size, mtime).
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # -1 marks an unset mtime.
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            # Symlink bit set in the recorded mode.
            mode = 'lnk'
        else:
            # Show permission bits with the process umask applied.
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # Also list recorded copy sources.
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
650 650
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery exchange and report the common heads found.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                # Reduce the common set to its heads via the DAG.
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery requests recorded in server log files.
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
716 716
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # Default verbosity: annotate the name with test status.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                # Show the most recent version the extension was tested
                # against.
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
761 761
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    # In verbose mode, also dump the parsed tree for inspection.
    if ui.verbose:
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")
    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
774 774
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(value):
        # Render a boolean check as 'yes'/'no'.
        return 'yes' if value else 'no'
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway file; if the path cannot be
    # written, report '(unknown)'.
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
789 789
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # Build the getbundle keyword arguments from the -C/-H node lists.
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = repo.getbundle('debug', **kwargs)

    # Map the user-facing --type name onto an on-disk bundle type.
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
823 823
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # The file itself does not match; check whether any of
                    # its containing directories is ignored instead.
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                # Report which ignore file and line produced the match.
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)
864 864
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # With generaldelta the stored base is a delta parent; otherwise it is
    # the chain base -- label the column accordingly.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # --debug shows full node hashes; otherwise abbreviated ones.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if parent lookup fails.
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
920 920
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # emit one edge per parent link; the null parent is omitted so roots
    # and non-merge revisions get a single incoming edge
    for rev in rlog:
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
934 934
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write 'contents' to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # running count of detected problems; returned as the exit code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    # only probe the C extensions when the policy allows them at all
    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    # p is cleared above whenever the default template is missing or broken
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    # 'vi' is the built-in fallback, so a missing 'vi' means no editor was
    # configured at all; any other missing editor is an explicit bad setting
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1101 1101
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    results = peer.known([bin(nodeid) for nodeid in ids])
    # one digit per queried id, in input order
    bits = "".join("1" if known else "0" for known in results)
    ui.write("%s\n" % bits)
1114 1114
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias so old completion scripts keep working;
    # all the actual work happens in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1119 1119
@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
     ('W', 'force-wlock', None,
      _('free the working state lock (DANGEROUS)'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # bug fix: this condition used to test 'force_lock' twice, so
    # --force-wlock on its own freed the wlock but then fell through to
    # the reporting code below instead of returning immediately
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """print the state of one lock file; return 1 if held, else 0"""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # the lock was free (we just acquired it); release and fall
            # through to the 'free' report below
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # the lock file vanished between the probe and the stat;
                # treat it as free

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1191 1191
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the all-zero hash as the literal word 'null' for readability
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump the records of the chosen merge-state format (1 or 2)
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: driver command plus its state byte
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge records: NUL-separated fields; v2 carries
                # two extra trailing fields (other node, flags)
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: filename then alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # conflict-marker labels: local, other, and optionally base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort in 'order'; unknown types sort after
        # them, keyed by payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1290 1290
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # the 'branches' namespace is handled separately below: historically
    # only open branches were offered, so closed ones must stay hidden
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # with no arguments, complete against the empty prefix (i.e. all names)
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1310 1310
@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
     ('', 'record-parents', False,
      _('record parent information for the precursor')),
     ('r', 'rev', [], _('display markers relevant to REV')),
     ('', 'exclusive', False, _('restrict display to markers only '
                                'relevant to REV')),
     ('', 'index', False, _('display index of the marker')),
     ('', 'delete', [], _('delete markers specified by indices')),
    ] + cmdutil.commitopts2 + cmdutil.formatteropts,
    _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # parse a full-length hex node id; abort on anything else
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # deletion mode: strip the markers at the given store indices
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker from precursor to successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            # fix: a stale duplicate call to getmarkers() without the
            # 'exclusive' argument used to precede this one, computing the
            # marker list twice and discarding the first result
            markers = list(obsolete.getmarkers(repo, nodes=nodes,
                                               exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # indices must be computed against the full marker list, then
            # filtered down to the markers relevant to --rev
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1421 1424
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # return (files, dirs) completions for 'path', limited to dirstate
        # entries whose state character is in 'acceptable'
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # anything outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate back and forth on
        # platforms with a different separator
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, truncate at the next path separator and
                # offer the containing directory instead
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter options;
    # no filter means accept every state ('nmar' below)
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1486 1489
@command('debugpickmergetool',
    [('r', 'rev', '', _('check for files in this revision'), _('REV')),
     ('', 'changedelete', None, _('emulate merging change and delete')),
    ] + cmdutil.walkopts + cmdutil.mergetoolopts,
    _('[PATTERN]...'),
    inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # apply --tool through the same config override the real merge uses
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # suppress tool-selection chatter unless --debug is set
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1564 1567
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair, escaped for display
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(key),
                                   util.escapestr(value)))
        return
    # push mode: conditionally update one key and report the outcome
    key, old, new = keyinfo
    result = peer.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
1585 1588
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs of two revisions and print their relation"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # bug fix: 'rel' used to be left unbound (UnboundLocalError below)
        # whenever none of the four relations held
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1606 1609
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files known only to the manifest, or only to the dirstate
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # dirstate-only files marked 'a' (added) are deliberately kept
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1644 1647
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # all the work is delegated to the repair module
    repair.rebuildfncache(ui, repo)
1649 1652
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        relpath = matcher.rel(abspath)
        # filelog().renamed() gives (source path, source node) or falsy
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
1666 1669
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: print one raw index row per revision and return.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # full revision: report the rev itself as its delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the current set of heads incrementally: a rev's
            # parents stop being heads once the rev itself is seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # statistics mode: decode the revlog version/flags first
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # counters for the report below
    nummerges = 0
    numfull = 0
    numprev = 0      # deltas whose base is the previous rev
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # each size accumulator is a mutable [min, max, total] triple
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            # stored as a delta: classify its base (prev / p1 / p2 / other)
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = segment[0]
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the `total` slot of each accumulator into an average in place
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # templates for right-aligned numeric columns, with/without percentage
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # return (value, percentage-of-total) for the pcfmtstr templates
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # render the one-byte chunk-type tag in a human-friendly way
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in string.ascii_letters:
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
1886 1889
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    # ordered pipeline of (name, transform) applied to the parsed tree
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stage trees get printed: always, or only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping every intermediate tree for --verify-optimized
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style comparison of the two evaluations
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # normal mode: evaluate the final tree and print the resulting revs
    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)
1979 1982
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # a missing second revision defaults to the null revision
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()
    with repo.wlock():
        repo.setparents(node1, node2)
1997 2000
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Print the substate (path, source, revision) of each subrepository
    # recorded in the given revision, in sorted path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2008 2011
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        # in debug mode, print full 40-char hashes instead of short ones
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        # one line per successors set; an empty set prints a blank line
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2062 2065
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command is optionalrepo
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    # collect -D KEY=VALUE definitions as extra template properties
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # show the parsed tree, and the tree again after alias expansion
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    mapfile = None
    if revs is None:
        # generic template: render once with only ui and -D props in scope
        k = 'debugtemplate'
        t = formatter.maketemplater(ui, k, tmpl)
        ui.write(templater.stringify(t(k, ui=ui, **props)))
    else:
        # log template: render against every requested changeset
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()
2113 2116
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # take the working-copy lock and the store lock (in that order) so the
    # cache files can be rewritten safely
    with repo.wlock(), repo.lock():
        repo.updatecaches()
2120 2123
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # all of the analysis and upgrade logic lives in the upgrade module
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2145 2148
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
    inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % m))
    items = list(repo[None].walk(m))
    if not items:
        return
    # normalize path separators for display when ui.slash is configured
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    abswidth = max(len(path) for path in items)
    relwidth = max(len(m.rel(path)) for path in items)
    fmt = 'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for path in items:
        line = fmt % (path, display(m.rel(path)),
                      'exact' if m.exact(path) else '')
        ui.write("%s\n" % line.rstrip())
2164 2167
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise wire-protocol argument passing against a peer repository.
    peer = hg.peer(ui, opts, repopath)
    # strip the standard remote options so only the debug ones remain
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    args = {}
    for key, value in opts.iteritems():
        if value:
            args[key] = value
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
@@ -1,1301 +1,1426 b''
1 1 # obsolete.py - obsolete markers handling
2 2 #
3 3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 4 # Logilab SA <contact@logilab.fr>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """Obsolete marker handling
10 10
11 11 An obsolete marker maps an old changeset to a list of new
12 12 changesets. If the list of new changesets is empty, the old changeset
13 13 is said to be "killed". Otherwise, the old changeset is being
14 14 "replaced" by the new changesets.
15 15
16 16 Obsolete markers can be used to record and distribute changeset graph
17 17 transformations performed by history rewrite operations, and help
18 18 building new tools to reconcile conflicting rewrite actions. To
19 19 facilitate conflict resolution, markers include various annotations
20 20 besides old and new changeset identifiers, such as creation date or
21 21 author name.
22 22
23 23 The old obsoleted changeset is called a "precursor" and possible
24 24 replacements are called "successors". Markers that used changeset X as
25 25 a precursor are called "successor markers of X" because they hold
26 26 information about the successors of X. Markers that use changeset Y as
27 27 a successor are called "precursor markers of Y" because they hold
28 28 information about the precursors of Y.
29 29
30 30 Examples:
31 31
32 32 - When changeset A is replaced by changeset A', one marker is stored:
33 33
34 34 (A, (A',))
35 35
36 36 - When changesets A and B are folded into a new changeset C, two markers are
37 37 stored:
38 38
39 39 (A, (C,)) and (B, (C,))
40 40
41 41 - When changeset A is simply "pruned" from the graph, a marker is created:
42 42
43 43 (A, ())
44 44
45 45 - When changeset A is split into B and C, a single marker is used:
46 46
47 47 (A, (B, C))
48 48
49 49 We use a single marker to distinguish the "split" case from the "divergence"
50 50 case. If two independent operations rewrite the same changeset A in to A' and
51 51 A'', we have an error case: divergent rewriting. We can detect it because
52 52 two markers will be created independently:
53 53
54 54 (A, (B,)) and (A, (C,))
55 55
56 56 Format
57 57 ------
58 58
59 59 Markers are stored in an append-only file stored in
60 60 '.hg/store/obsstore'.
61 61
62 62 The file starts with a version header:
63 63
64 64 - 1 unsigned byte: version number, starting at zero.
65 65
66 66 The header is followed by the markers. Marker format depend of the version. See
67 67 comment associated with each format for details.
68 68
69 69 """
70 70 from __future__ import absolute_import
71 71
72 72 import errno
73 73 import struct
74 74
75 75 from .i18n import _
76 76 from . import (
77 77 error,
78 78 node,
79 79 phases,
80 80 policy,
81 81 util,
82 82 )
83 83
84 84 parsers = policy.importmod(r'parsers')
85 85
86 86 _pack = struct.pack
87 87 _unpack = struct.unpack
88 88 _calcsize = struct.calcsize
89 89 propertycache = util.propertycache
90 90
91 91 # the obsolete feature is not mature enough to be enabled by default.
92 92 # you have to rely on third party extension extension to enable this.
93 93 _enabled = False
94 94
95 95 # Options for obsolescence
96 96 createmarkersopt = 'createmarkers'
97 97 allowunstableopt = 'allowunstable'
98 98 exchangeopt = 'exchange'
99 99
def isenabled(repo, option):
    """Return True if the repository has the given obsolete option enabled.

    'all' in experimental.evolution enables every option at once.
    """
    configured = set(repo.ui.configlist('experimental', 'evolution'))
    if 'all' in configured:
        return True

    # Migration shim: an unset config temporarily falls back to the
    # module-level _enabled switch flipped by third-party extensions.
    if not configured and _enabled:
        return True

    # Dependent options require createmarkers to be enabled as well.
    dependent = allowunstableopt in configured or exchangeopt in configured
    if dependent and createmarkersopt not in configured:
        raise error.Abort(_("'createmarkers' obsolete option must be enabled "
                           "if other obsolete options are enabled"))

    return option in configured
120 120
121 121 ### obsolescence marker flag
122 122
123 123 ## bumpedfix flag
124 124 #
125 125 # When a changeset A' succeed to a changeset A which became public, we call A'
126 126 # "bumped" because it's a successors of a public changesets
127 127 #
128 128 # o A' (bumped)
129 129 # |`:
130 130 # | o A
131 131 # |/
132 132 # o Z
133 133 #
134 134 # The way to solve this situation is to create a new changeset Ad as children
135 135 # of A. This changeset have the same content than A'. So the diff from A to A'
136 136 # is the same than the diff from A to Ad. Ad is marked as a successors of A'
137 137 #
138 138 # o Ad
139 139 # |`:
140 140 # | x A'
141 141 # |'|
142 142 # o | A
143 143 # |/
144 144 # o Z
145 145 #
146 146 # But by transitivity Ad is also a successors of A. To avoid having Ad marked
147 147 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
148 148 # This flag mean that the successors express the changes between the public and
149 149 # bumped version and fix the situation, breaking the transitivity of
150 150 # "bumped" here.
151 151 bumpedfix = 1
152 152 usingsha256 = 2
153 153
154 154 ## Parsing and writing of version "0"
155 155 #
156 156 # The header is followed by the markers. Each marker is made of:
157 157 #
158 158 # - 1 uint8 : number of new changesets "N", can be zero.
159 159 #
160 160 # - 1 uint32: metadata size "M" in bytes.
161 161 #
162 162 # - 1 byte: a bit field. It is reserved for flags used in common
163 163 # obsolete marker operations, to avoid repeated decoding of metadata
164 164 # entries.
165 165 #
166 166 # - 20 bytes: obsoleted changeset identifier.
167 167 #
168 168 # - N*20 bytes: new changesets identifiers.
169 169 #
170 170 # - M bytes: metadata as a sequence of nul-terminated strings. Each
171 171 # string contains a key and a value, separated by a colon ':', without
172 172 # additional encoding. Keys cannot contain '\0' or ':' and values
173 173 # cannot contain '\0'.
174 174 _fm0version = 0
175 175 _fm0fixed = '>BIB20s'
176 176 _fm0node = '20s'
177 177 _fm0fsize = _calcsize(_fm0fixed)
178 178 _fm0fnodesize = _calcsize(_fm0node)
179 179
def _fm0readmarkers(data, off):
    """Yield markers decoded from version-0 binary `data` starting at `off`.

    Each yielded item is (precursor, successors, flags, metadata, date,
    parents) as used throughout this module.
    """
    # Loop on markers
    l = len(data)
    while off + _fm0fsize <= l:
        # read fixed part
        cur = data[off:off + _fm0fsize]
        off += _fm0fsize
        numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
        # read replacement
        sucs = ()
        if numsuc:
            s = (_fm0fnodesize * numsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fm0node * numsuc, cur)
            off += s
        # read metadata
        # (metadata will be decoded on demand)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise error.Abort(_('parsing obsolete marker: metadata is too '
                                'short, %d bytes expected, got %d')
                              % (mdsize, len(metadata)))
        off += mdsize
        metadata = _fm0decodemeta(metadata)
        # format 0 smuggles the date inside the metadata as "secs offset"
        try:
            when, offset = metadata.pop('date', '0 0').split(' ')
            date = float(when), int(offset)
        except ValueError:
            date = (0., 0)
        # parents are likewise encoded as p0/p1/p2 metadata keys:
        # p0 alone means "explicitly recorded no parent"
        parents = None
        if 'p2' in metadata:
            parents = (metadata.pop('p1', None), metadata.pop('p2', None))
        elif 'p1' in metadata:
            parents = (metadata.pop('p1', None),)
        elif 'p0' in metadata:
            parents = ()
        if parents is not None:
            try:
                parents = tuple(node.bin(p) for p in parents)
                # if parent content is not a nodeid, drop the data
                for p in parents:
                    if len(p) != 20:
                        parents = None
                        break
            except TypeError:
                # if content cannot be translated to nodeid drop the data.
                parents = None

        metadata = tuple(sorted(metadata.iteritems()))

        yield (pre, sucs, flags, metadata, date, parents)
231 231
def _fm0encodeonemarker(marker):
    """Serialize a single marker tuple in the version-0 binary format."""
    pre, sucs, flags, metadata, date, parents = marker
    if flags & usingsha256:
        raise error.Abort(_('cannot handle sha256 with old obsstore format'))
    meta = dict(metadata)
    # format 0 has no dedicated date/parents fields; stash them in metadata
    secs, tz = date
    meta['date'] = '%r %i' % (secs, tz)
    if parents is not None:
        if not parents:
            # mark that we explicitly recorded no parents
            meta['p0'] = ''
        for idx, p in enumerate(parents, 1):
            meta['p%i' % idx] = node.hex(p)
    encodedmeta = _fm0encodemeta(meta)
    numsuc = len(sucs)
    fmt = _fm0fixed + (_fm0node * numsuc)
    fields = [numsuc, len(encodedmeta), flags, pre]
    fields.extend(sucs)
    return _pack(fmt, *fields) + encodedmeta
251 251
252 252 def _fm0encodemeta(meta):
253 253 """Return encoded metadata string to string mapping.
254 254
255 255 Assume no ':' in key and no '\0' in both key and value."""
256 256 for key, value in meta.iteritems():
257 257 if ':' in key or '\0' in key:
258 258 raise ValueError("':' and '\0' are forbidden in metadata key'")
259 259 if '\0' in value:
260 260 raise ValueError("':' is forbidden in metadata value'")
261 261 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
262 262
263 263 def _fm0decodemeta(data):
264 264 """Return string to string dictionary from encoded version."""
265 265 d = {}
266 266 for l in data.split('\0'):
267 267 if l:
268 268 key, value = l.split(':')
269 269 d[key] = value
270 270 return d
271 271
272 272 ## Parsing and writing of version "1"
273 273 #
274 274 # The header is followed by the markers. Each marker is made of:
275 275 #
276 276 # - uint32: total size of the marker (including this field)
277 277 #
278 278 # - float64: date in seconds since epoch
279 279 #
280 280 # - int16: timezone offset in minutes
281 281 #
282 282 # - uint16: a bit field. It is reserved for flags used in common
283 283 # obsolete marker operations, to avoid repeated decoding of metadata
284 284 # entries.
285 285 #
286 286 # - uint8: number of successors "N", can be zero.
287 287 #
288 288 # - uint8: number of parents "P", can be zero.
289 289 #
290 290 # 0: parents data stored but no parent,
291 291 # 1: one parent stored,
292 292 # 2: two parents stored,
293 293 # 3: no parent data stored
294 294 #
295 295 # - uint8: number of metadata entries M
296 296 #
297 297 # - 20 or 32 bytes: precursor changeset identifier.
298 298 #
299 299 # - N*(20 or 32) bytes: successors changesets identifiers.
300 300 #
301 301 # - P*(20 or 32) bytes: parents of the precursors changesets.
302 302 #
303 303 # - M*(uint8, uint8): size of all metadata entries (key and value)
304 304 #
305 305 # - remaining bytes: the metadata, each (key, value) pair after the other.
306 306 _fm1version = 1
307 307 _fm1fixed = '>IdhHBBB20s'
308 308 _fm1nodesha1 = '20s'
309 309 _fm1nodesha256 = '32s'
310 310 _fm1nodesha1size = _calcsize(_fm1nodesha1)
311 311 _fm1nodesha256size = _calcsize(_fm1nodesha256)
312 312 _fm1fsize = _calcsize(_fm1fixed)
313 313 _fm1parentnone = 3
314 314 _fm1parentshift = 14
315 315 _fm1parentmask = (_fm1parentnone << _fm1parentshift)
316 316 _fm1metapair = 'BB'
317 317 _fm1metapairsize = _calcsize('BB')
318 318
def _fm1purereadmarkers(data, off):
    """Pure-Python decoder for version-1 markers.

    Yields (precursor, successors, flags, metadata, date, parents) tuples
    from `data` starting at byte offset `off`. The C implementation in
    parsers is preferred when available (see _fm1readmarkers).
    """
    # make some global constants local for performance
    noneflag = _fm1parentnone
    sha2flag = usingsha256
    sha1size = _fm1nodesha1size
    sha2size = _fm1nodesha256size
    sha1fmt = _fm1nodesha1
    sha2fmt = _fm1nodesha256
    metasize = _fm1metapairsize
    metafmt = _fm1metapair
    fsize = _fm1fsize
    unpack = _unpack

    # Loop on markers
    stop = len(data) - _fm1fsize
    ufixed = struct.Struct(_fm1fixed).unpack

    while off <= stop:
        # read fixed part
        o1 = off + fsize
        t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])

        if flags & sha2flag:
            # FIXME: prec was read as a SHA1, needs to be amended

            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha2size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha2size * numsuc
                sucs = unpack(sha2fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha2size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha2size * numpar
                parents = unpack(sha2fmt * numpar, data[o2:o3])
        else:
            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha1size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha1size * numsuc
                sucs = unpack(sha1fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha1size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha1size * numpar
                parents = unpack(sha1fmt * numpar, data[o2:o3])

        # read metadata: first the (keysize, valuesize) pairs, then slice
        # the key/value bytes out according to those sizes
        off = o3 + metasize * nummeta
        metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
        metadata = []
        for idx in xrange(0, len(metapairsize), 2):
            o1 = off + metapairsize[idx]
            o2 = o1 + metapairsize[idx + 1]
            metadata.append((data[off:o1], data[o1:o2]))
            off = o2

        # tz was stored in minutes (see _fm1encodeonemarker); restore seconds
        yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
393 393
def _fm1encodeonemarker(marker):
    """Return the "fm1" binary encoding of a single marker tuple.

    ``marker`` is a (prec, sucs, flags, metadata, date, parents) tuple; the
    layout mirrors what _fm1purereadmarkers decodes.
    """
    pre, sucs, flags, metadata, date, parents = marker
    # determine node size
    _fm1node = _fm1nodesha1
    if flags & usingsha256:
        _fm1node = _fm1nodesha256
    numsuc = len(sucs)
    numextranodes = numsuc
    if parents is None:
        # sentinel marking "parents not recorded" (vs. zero parents)
        numpar = _fm1parentnone
    else:
        numpar = len(parents)
        numextranodes += numpar
    formatnodes = _fm1node * numextranodes
    formatmeta = _fm1metapair * len(metadata)
    format = _fm1fixed + formatnodes + formatmeta
    # tz is stored in minutes so we divide by 60
    tz = date[1]//60
    # data[0] is a placeholder for the total size, filled in below once the
    # metadata lengths are known
    data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
    data.extend(sucs)
    if parents is not None:
        data.extend(parents)
    totalsize = _calcsize(format)
    for key, value in metadata:
        lk = len(key)
        lv = len(value)
        data.append(lk)
        data.append(lv)
        totalsize += lk + lv
    data[0] = totalsize
    data = [_pack(format, *data)]
    # metadata keys and values are appended verbatim after the packed struct
    for key, value in metadata:
        data.append(key)
        data.append(value)
    return ''.join(data)
429 429
def _fm1readmarkers(data, off):
    """Decode fm1 markers from ``data``, preferring the C implementation.

    Falls back to the pure-python decoder when the compiled parser does not
    provide ``fm1readmarkers``.
    """
    fast = getattr(parsers, 'fm1readmarkers', None)
    if fast:
        return fast(data, off, len(data) - _fm1fsize)
    return _fm1purereadmarkers(data, off)
436 436
# mapping to read/write various marker formats
# <version> -> (decoder, encoder)
# decoder: callable(data, offset) returning an iterable of marker tuples
# encoder: callable(marker) returning the binary encoding of one marker
formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
           _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
441 441
@util.nogc
def _readmarkers(data):
    """Read and enumerate markers from raw data

    The first byte of ``data`` is the encoding version of the remaining
    payload. Returns (version, iterable-of-marker-tuples) or raises
    UnknownVersion for versions we cannot decode.
    """
    diskversion = _unpack('>B', data[0:1])[0]
    if diskversion not in formats:
        msg = _('parsing obsolete marker: unknown version %r') % diskversion
        raise error.UnknownVersion(msg, version=diskversion)
    return diskversion, formats[diskversion][0](data, 1)
452 452
def encodemarkers(markers, addheader=False, version=_fm0version):
    """Yield the binary encoding of ``markers``, optionally preceded by a
    one-byte version header.

    Kept separate from flushmarkers(), it will be reused for markers exchange.
    """
    encodeone = formats[version][1]
    if addheader:
        yield _pack('>B', version)
    for m in markers:
        yield encodeone(m)
461 461
462 462
class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def __hash__(self):
        return hash(self._data)

    def __eq__(self, other):
        # only markers wrapping identical raw tuples are equal
        return type(other) == type(self) and self._data == other._data

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changesets node identifiers"""
        return self._data[1]

    def parentnodes(self):
        """Parents of the precursors (None if not recorded)"""
        return self._data[5]

    def metadata(self):
        """Decoded metadata dictionary"""
        return dict(self._data[3])

    def date(self):
        """Creation date as (unixtime, offset)"""
        return self._data[4]

    def flags(self):
        """The flags field of the marker"""
        return self._data[2]
503 503
@util.nogc
def _addsuccessors(successors, markers):
    """Index ``markers`` into ``successors``, keyed by precursor node."""
    add = successors.setdefault
    for m in markers:
        add(m[0], set()).add(m)
508 508
@util.nogc
def _addprecursors(precursors, markers):
    """Index ``markers`` into ``precursors``, keyed by each successor node."""
    for m in markers:
        for succ in m[1]:
            precursors.setdefault(succ, set()).add(m)
514 514
@util.nogc
def _addchildren(children, markers):
    """Index ``markers`` into ``children``, keyed by each recorded parent.

    Markers whose parents were not recorded (None) contribute nothing.
    """
    for m in markers:
        if m[5] is None:
            continue
        for p in m[5]:
            children.setdefault(p, set()).add(m)
522 522
def _checkinvalidmarkers(markers):
    """search for marker with invalid data and raise error if needed

    Exist as a separated function to allow the evolve extension for a more
    subtle handling.
    """
    if any(node.nullid in m[1] for m in markers):
        raise error.Abort(_('bad obsolescence marker detected: '
                            'invalid successors nullid'))
533 533
class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with three mappings:
    - precursors[x] -> set(markers on precursors edges of x)
    - successors[x] -> set(markers on successors edges of x)
    - children[x] -> set(markers on precursors edges of children(x))
    """

    fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
    # prec: nodeid, precursor changesets
    # succs: tuple of nodeid, successor changesets (0-N length)
    # flag: integer, flag field carrying modifier for the markers (see doc)
    # meta: binary blob, encoded metadata dictionary
    # date: (float, int) tuple, date of marker creation
    # parents: (tuple of nodeid) or None, parents of precursors
    #          None is used when no data has been recorded

    def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
        # caches for various obsolescence related cache
        self.caches = {}
        self.svfs = svfs
        self._version = defaultformat
        self._readonly = readonly

    def __iter__(self):
        return iter(self._all)

    def __len__(self):
        return len(self._all)

    def __nonzero__(self):
        # Avoid loading and parsing every marker when only emptiness is
        # asked: the on-disk file starts with a one-byte version header
        # (see encodemarkers), so any size above 1 means markers exist.
        if not self._cached('_all'):
            try:
                return self.svfs.stat('obsstore').st_size > 1
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise
                # just build an empty _all list if no obsstore exists, which
                # avoids further stat() syscalls
                pass
        return bool(self._all)

    __bool__ = __nonzero__

    @property
    def readonly(self):
        """True if marker creation is disabled

        Remove me in the future when obsolete marker is always on."""
        return self._readonly

    def create(self, transaction, prec, succs=(), flag=0, parents=None,
               date=None, metadata=None, ui=None):
        """obsolete: add a new obsolete marker

        * ensuring it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code creating marker you want to use the
        `createmarkers` function in this module instead.

        return True if a new marker have been added, False if the markers
        already existed (no op).
        """
        if metadata is None:
            metadata = {}
        if date is None:
            if 'date' in metadata:
                # as a courtesy for out-of-tree extensions
                date = util.parsedate(metadata.pop('date'))
            elif ui is not None:
                date = ui.configdate('devel', 'default-date')
                if date is None:
                    date = util.makedate()
            else:
                date = util.makedate()
        # nodes are expected to be 20-byte binary nodeids
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))

        # sorted tuple makes the metadata hashable and deterministic
        metadata = tuple(sorted(metadata.iteritems()))

        marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
        return bool(self.add(transaction, [marker]))

    def add(self, transaction, markers):
        """Add new markers to the store

        Take care of filtering duplicate.
        Return the number of new marker."""
        if self._readonly:
            raise error.Abort(_('creating obsolete markers is not enabled on '
                                'this repo'))
        # filter out markers already present in the store
        known = set(self._all)
        new = []
        for m in markers:
            if m not in known:
                known.add(m)
                new.append(m)
        if new:
            f = self.svfs('obsstore', 'ab')
            try:
                offset = f.tell()
                transaction.add('obsstore', offset)
                # offset == 0: new file - add the version header
                for bytes in encodemarkers(new, offset == 0, self._version):
                    f.write(bytes)
            finally:
                # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                # call 'filecacheentry.refresh()' here
                f.close()
            self._addmarkers(new)
            # new marker *may* have changed several set. invalidate the cache.
            self.caches.clear()
        # records the number of new markers for the transaction hooks
        previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
        transaction.hookargs['new_obsmarkers'] = str(previous + len(new))
        return len(new)

    def mergemarkers(self, transaction, data):
        """merge a binary stream of markers inside the obsstore

        Returns the number of new markers added."""
        version, markers = _readmarkers(data)
        return self.add(transaction, markers)

    @propertycache
    def _all(self):
        # lazily loaded, validated list of every marker on disk
        data = self.svfs.tryread('obsstore')
        if not data:
            return []
        self._version, markers = _readmarkers(data)
        markers = list(markers)
        _checkinvalidmarkers(markers)
        return markers

    @propertycache
    def successors(self):
        # precursor node -> set of markers rewriting it
        successors = {}
        _addsuccessors(successors, self._all)
        return successors

    @propertycache
    def precursors(self):
        # successor node -> set of markers producing it
        precursors = {}
        _addprecursors(precursors, self._all)
        return precursors

    @propertycache
    def children(self):
        # parent node -> set of markers whose precursor is one of its children
        children = {}
        _addchildren(children, self._all)
        return children

    def _cached(self, attr):
        # True when the propertycache-d attribute has already been computed
        return attr in self.__dict__

    def _addmarkers(self, markers):
        markers = list(markers) # to allow repeated iteration
        self._all.extend(markers)
        # keep any already-computed index in sync instead of invalidating it
        if self._cached('successors'):
            _addsuccessors(self.successors, markers)
        if self._cached('precursors'):
            _addprecursors(self.precursors, markers)
        if self._cached('children'):
            _addchildren(self.children, markers)
        _checkinvalidmarkers(markers)

    def relevantmarkers(self, nodes):
        """return a set of all obsolescence markers relevant to a set of nodes.

        "relevant" to a set of nodes mean:

        - marker that use this changeset as successor
        - prune marker of direct children on this changeset
        - recursive application of the two rules on precursors of these markers

        It is a set so you cannot rely on order."""

        pendingnodes = set(nodes)
        seenmarkers = set()
        seennodes = set(pendingnodes)
        precursorsmarkers = self.precursors
        succsmarkers = self.successors
        children = self.children
        while pendingnodes:
            direct = set()
            for current in pendingnodes:
                direct.update(precursorsmarkers.get(current, ()))
                # prune markers have an empty successor tuple (m[1])
                pruned = [m for m in children.get(current, ()) if not m[1]]
                direct.update(pruned)
                pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
                direct.update(pruned)
                direct -= seenmarkers
            # walk up: precursors of selected markers become pending
            pendingnodes = set([m[0] for m in direct])
            seenmarkers |= direct
            pendingnodes -= seennodes
            seennodes |= pendingnodes
        return seenmarkers
739 739
740 def _filterprunes(markers):
741 """return a set with no prune markers"""
742 return set(m for m in markers if m[1])
743
def exclusivemarkers(repo, nodes):
    """set of markers relevant to "nodes" but no other locally-known nodes

    This function compute the set of markers "exclusive" to a locally-known
    node. This means we walk the markers starting from <nodes> until we reach a
    locally-known precursors outside of <nodes>. Element of <nodes> with
    locally-known successors outside of <nodes> are ignored (since their
    precursors markers are also relevant to these successors).

    For example:

        # (A0 rewritten as A1)
        #
        # A0 <-1- A1 # Marker "1" is exclusive to A1

    or

        # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
        #
        # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1

    or

        # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
        #
        # <-2- A1 # Marker "2" is exclusive to A0,A1
        #        /
        # <-1- A0
        #        \
        # <-3- A2 # Marker "3" is exclusive to A0,A2
        #
        # in addition:
        #
        #  Markers "2,3" are exclusive to A1,A2
        #  Markers "1,2,3" are exclusive to A0,A1,A2

    An example usage is strip. When stripping a changeset, we also want to
    strip the markers exclusive to this changeset. Otherwise we would have
    "dangling" obsolescence markers from its precursors: Obsolescence markers
    marking a node as obsolete without any successors available locally.

    As for relevant markers, the prune markers for children will be followed.
    Of course, they will only be followed if the pruned children is
    locally-known. Since the prune markers are relevant to the pruned node.
    However, while prune markers are considered relevant to the parent of the
    pruned changesets, prune markers for locally-known changeset (with no
    successors) are considered exclusive to the pruned nodes. This allows
    to strip the prune markers (with the rest of the exclusive chain) alongside
    the pruned changesets.
    """
    # running on a filtered repository would be dangerous as markers could be
    # reported as exclusive when they are relevant for other filtered nodes.
    unfi = repo.unfiltered()

    # shortcut to various useful item
    nm = unfi.changelog.nodemap
    precursorsmarkers = unfi.obsstore.precursors
    successormarkers = unfi.obsstore.successors
    childrenmarkers = unfi.obsstore.children

    # exclusive markers (return of the function)
    exclmarkers = set()
    # we need fast membership testing
    nodes = set(nodes)
    # looking for head in the obshistory
    #
    # XXX we are ignoring all issues in regard with cycle for now.
    stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
    stack.sort()
    # nodes already stacked
    seennodes = set(stack)
    while stack:
        current = stack.pop()
        # fetch precursors markers
        markers = list(precursorsmarkers.get(current, ()))
        # extend the list with prune markers
        for mark in successormarkers.get(current, ()):
            if not mark[1]:
                markers.append(mark)
        # and markers from children (looking for prune)
        for mark in childrenmarkers.get(current, ()):
            if not mark[1]:
                markers.append(mark)
        # traverse the markers
        for mark in markers:
            if mark in exclmarkers:
                # markers already selected
                continue

            # If the markers is about the current node, select it
            #
            # (this delay the addition of markers from children)
            if mark[1] or mark[0] == current:
                exclmarkers.add(mark)

            # should we keep traversing through the precursors?
            prec = mark[0]

            # nodes in the stack or already processed
            if prec in seennodes:
                continue

            # is this a locally known node ?
            known = prec in nm
            # if locally-known and not in the <nodes> set the traversal
            # stop here.
            if known and prec not in nodes:
                continue

            # do not keep going if there are unselected markers pointing to this
            # nodes. If we end up traversing these unselected markers later the
            # node will be taken care of at that point.
            #
            # default to () so a precursor with no successor markers at all
            # yields an empty set instead of raising a TypeError inside
            # _filterprunes.
            precmarkers = _filterprunes(successormarkers.get(prec, ()))
            if precmarkers.issubset(exclmarkers):
                seennodes.add(prec)
                stack.append(prec)

    return exclmarkers
862
def commonversion(versions):
    """Return the newest version listed in both versions and our local formats.

    Returns None if no common version exists.
    """
    # newest first, then pick the first one we also know how to handle
    versions.sort(reverse=True)
    return next((v for v in versions if v in formats), None)
751 874
# Arbitrarily picked to fit into the 8K limit from the HTTP server.
# You have to take into account:
# - the version header
# - the base85 encoding
_maxpayload = 5300
757 880
def _pushkeyescape(markers):
    """encode markers into a dict suitable for pushkey exchange

    - binary data is base85 encoded
    - split in chunks smaller than 5300 bytes"""
    parts = []
    currentlen = _maxpayload * 2  # oversized so the first marker opens a part
    for marker in markers:
        encoded = _fm0encodeonemarker(marker)
        if currentlen + len(encoded) > _maxpayload:
            # start a fresh chunk
            parts.append([])
            currentlen = 0
        parts[-1].append(encoded)
        currentlen += len(encoded)
    keys = {}
    for idx, part in enumerate(reversed(parts)):
        blob = ''.join([_pack('>B', _fm0version)] + part)
        keys['dump%i' % idx] = util.b85encode(blob)
    return keys
778 901
def listmarkers(repo):
    """List markers over pushkey"""
    if repo.obsstore:
        return _pushkeyescape(sorted(repo.obsstore))
    return {}
784 907
def pushmarker(repo, key, old, new):
    """Push markers over pushkey

    Returns 1 when the payload was merged into the local obsstore, 0 when
    the key or old value is rejected (pushkey protocol convention).
    """
    # payload keys are named 'dump0', 'dump1', ... (see _pushkeyescape)
    if not key.startswith('dump'):
        repo.ui.warn(_('unknown key: %r') % key)
        return 0
    # markers are append-only; a caller should never claim a previous value
    if old:
        repo.ui.warn(_('unexpected old value for %r') % key)
        return 0
    data = util.b85decode(new)
    lock = repo.lock()
    try:
        tr = repo.transaction('pushkey: obsolete markers')
        try:
            repo.obsstore.mergemarkers(tr, data)
            repo.invalidatevolatilesets()
            tr.close()
            return 1
        finally:
            # release after close is a no-op commit; on error it rolls back
            tr.release()
    finally:
        lock.release()
806 929
def getmarkers(repo, nodes=None, exclusive=False):
    """Yield ``marker`` objects known in a repository.

    Without <nodes>, every marker in the store is yielded. With <nodes>,
    only markers "relevant" to those nodes are yielded; when ``exclusive``
    is also set, only markers exclusive to those nodes are yielded.
    """
    if nodes is None:
        rawmarkers = repo.obsstore
    elif exclusive:
        rawmarkers = exclusivemarkers(repo, nodes)
    else:
        rawmarkers = repo.obsstore.relevantmarkers(nodes)
    for data in rawmarkers:
        yield marker(repo, data)
819 944
def relevantmarkers(repo, node):
    """all obsolete markers relevant to some revision"""
    rawmarkers = repo.obsstore.relevantmarkers(node)
    for data in rawmarkers:
        yield marker(repo, data)
824 949
825 950
def precursormarkers(ctx):
    """obsolete markers marking this changeset as a successor"""
    rawmarkers = ctx.repo().obsstore.precursors.get(ctx.node(), ())
    for data in rawmarkers:
        yield marker(ctx.repo(), data)
830 955
def successormarkers(ctx):
    """obsolete markers making this changeset obsolete"""
    rawmarkers = ctx.repo().obsstore.successors.get(ctx.node(), ())
    for data in rawmarkers:
        yield marker(ctx.repo(), data)
835 960
def allsuccessors(obsstore, nodes, ignoreflags=0):
    """Yield node for every successor of <nodes>.

    Some successors may be unknown locally.

    This is a linear yield unsuited to detecting split changesets. It includes
    initial nodes too."""
    pending = set(nodes)
    seen = set(pending)
    while pending:
        current = pending.pop()
        yield current
        for mark in obsstore.successors.get(current, ()):
            # skip markers carrying any of the ignored flags
            if not (mark[2] & ignoreflags):
                for suc in mark[1]:
                    if suc not in seen:
                        seen.add(suc)
                        pending.add(suc)
856 981
def allprecursors(obsstore, nodes, ignoreflags=0):
    """Yield node for every precursors of <nodes>.

    Some precursors may be unknown locally.

    This is a linear yield unsuited to detecting folded changesets. It includes
    initial nodes too."""
    pending = set(nodes)
    seen = set(pending)
    while pending:
        current = pending.pop()
        yield current
        for mark in obsstore.precursors.get(current, ()):
            # skip markers carrying any of the ignored flags
            if not (mark[2] & ignoreflags):
                prec = mark[0]
                if prec not in seen:
                    seen.add(prec)
                    pending.add(prec)
878 1003
def foreground(repo, nodes):
    """return all nodes in the "foreground" of other node

    The foreground of a revision is anything reachable using parent -> children
    or precursor -> successor relation. It is very similar to "descendant" but
    augmented with obsolescence information.

    Beware that possible obsolescence cycle may result if complex situation.
    """
    repo = repo.unfiltered()
    foreground = set(repo.set('%ln::', nodes))
    if repo.obsstore:
        # We only need this complicated logic if there is obsolescence
        # XXX will probably deserve an optimised revset.
        nm = repo.changelog.nodemap
        plen = -1
        # compute the whole set of successors or descendants
        # (fixed-point iteration: loop until no new node is discovered)
        while len(foreground) != plen:
            plen = len(foreground)
            succs = set(c.node() for c in foreground)
            mutable = [c.node() for c in foreground if c.mutable()]
            succs.update(allsuccessors(repo.obsstore, mutable))
            # successors may be unknown locally; only locally-known nodes can
            # seed the next descendant query
            known = (n for n in succs if n in nm)
            foreground = set(repo.set('%ln::', known))
    return set(c.node() for c in foreground)
904 1029
905 1030
def successorssets(repo, initialnode, cache=None):
    """Return set of all latest successors of initial nodes

    The successors set of a changeset A are the group of revisions that succeed
    A. It succeeds A as a consistent whole, each revision being only a partial
    replacement. The successors set contains non-obsolete changesets only.

    This function returns the full list of successor sets which is why it
    returns a list of tuples and not just a single tuple. Each tuple is a valid
    successors set. Note that (A,) may be a valid successors set for changeset A
    (see below).

    In most cases, a changeset A will have a single element (e.g. the changeset
    A is replaced by A') in its successors set. Though, it is also common for a
    changeset A to have no elements in its successor set (e.g. the changeset
    has been pruned). Therefore, the returned list of successors sets will be
    [(A',)] or [], respectively.

    When a changeset A is split into A' and B', however, it will result in a
    successors set containing more than a single element, i.e. [(A',B')].
    Divergent changesets will result in multiple successors sets, i.e. [(A',),
    (A'')].

    If a changeset A is not obsolete, then it will conceptually have no
    successors set. To distinguish this from a pruned changeset, the successor
    set will contain itself only, i.e. [(A,)].

    Finally, successors unknown locally are considered to be pruned (obsoleted
    without any successors).

    The optional `cache` parameter is a dictionary that may contain precomputed
    successors sets. It is meant to reuse the computation of a previous call to
    `successorssets` when multiple calls are made at the same time. The cache
    dictionary is updated in place. The caller is responsible for its life
    span. Code that makes multiple calls to `successorssets` *must* use this
    cache mechanism or suffer terrible performance.
    """

    succmarkers = repo.obsstore.successors

    # Stack of nodes we search successors sets for
    toproceed = [initialnode]
    # set version of above list for fast loop detection
    # element added to "toproceed" must be added here
    stackedset = set(toproceed)
    if cache is None:
        cache = {}

    # This while loop is the flattened version of a recursive search for
    # successors sets
    #
    # def successorssets(x):
    #     successors = directsuccessors(x)
    #     ss = [[]]
    #     for succ in directsuccessors(x):
    #         # product as in itertools cartesian product
    #         ss = product(ss, successorssets(succ))
    #     return ss
    #
    # But we can not use plain recursive calls here:
    # - that would blow the python call stack
    # - obsolescence markers may have cycles, we need to handle them.
    #
    # The `toproceed` list act as our call stack. Every node we search
    # successors set for are stacked there.
    #
    # The `stackedset` is set version of this stack used to check if a node is
    # already stacked. This check is used to detect cycles and prevent infinite
    # loop.
    #
    # successors set of all nodes are stored in the `cache` dictionary.
    #
    # After this while loop ends we use the cache to return the successors sets
    # for the node requested by the caller.
    while toproceed:
        # Every iteration tries to compute the successors sets of the topmost
        # node of the stack: CURRENT.
        #
        # There are four possible outcomes:
        #
        # 1) We already know the successors sets of CURRENT:
        #    -> mission accomplished, pop it from the stack.
        # 2) Node is not obsolete:
        #    -> the node is its own successors sets. Add it to the cache.
        # 3) We do not know successors set of direct successors of CURRENT:
        #    -> We add those successors to the stack.
        # 4) We know successors sets of all direct successors of CURRENT:
        #    -> We can compute CURRENT successors set and add it to the
        #       cache.
        #
        current = toproceed[-1]
        if current in cache:
            # case (1): We already know the successors sets
            stackedset.remove(toproceed.pop())
        elif current not in succmarkers:
            # case (2): The node is not obsolete.
            if current in repo:
                # We have a valid last successors.
                cache[current] = [(current,)]
            else:
                # Final obsolete version is unknown locally.
                # Do not count that as a valid successors
                cache[current] = []
        else:
            # cases (3) and (4)
            #
            # We proceed in two phases. Phase 1 aims to distinguish case (3)
            # from case (4):
            #
            #     For each direct successors of CURRENT, we check whether its
            #     successors sets are known. If they are not, we stack the
            #     unknown node and proceed to the next iteration of the while
            #     loop. (case 3)
            #
            #     During this step, we may detect obsolescence cycles: a node
            #     with unknown successors sets but already in the call stack.
            #     In such a situation, we arbitrary set the successors sets of
            #     the node to nothing (node pruned) to break the cycle.
            #
            #     If no break was encountered we proceed to phase 2.
            #
            # Phase 2 computes successors sets of CURRENT (case 4); see details
            # in phase 2 itself.
            #
            # Note the two levels of iteration in each phase.
            # - The first one handles obsolescence markers using CURRENT as
            #   precursor (successors markers of CURRENT).
            #
            #   Having multiple entry here means divergence.
            #
            # - The second one handles successors defined in each marker.
            #
            #   Having none means pruned node, multiple successors means split,
            #   single successors are standard replacement.
            #
            # The for/else + break construct below propagates an inner break
            # (case 3: a successor was pushed on the stack) out of both loops;
            # the outer else only runs when no successor was pushed (case 4).
            for mark in sorted(succmarkers[current]):
                for suc in mark[1]:
                    if suc not in cache:
                        if suc in stackedset:
                            # cycle breaking
                            cache[suc] = []
                        else:
                            # case (3) If we have not computed successors sets
                            # of one of those successors we add it to the
                            # `toproceed` stack and stop all work for this
                            # iteration.
                            toproceed.append(suc)
                            stackedset.add(suc)
                            break
                else:
                    continue
                break
            else:
                # case (4): we know all successors sets of all direct
                # successors
                #
                # Successors set contributed by each marker depends on the
                # successors sets of all its "successors" node.
                #
                # Each different marker is a divergence in the obsolescence
                # history. It contributes successors sets distinct from other
                # markers.
                #
                # Within a marker, a successor may have divergent successors
                # sets. In such a case, the marker will contribute multiple
                # divergent successors sets. If multiple successors have
                # divergent successors sets, a Cartesian product is used.
                #
                # At the end we post-process successors sets to remove
                # duplicated entry and successors set that are strict subset of
                # another one.
                succssets = []
                for mark in sorted(succmarkers[current]):
                    # successors sets contributed by this marker
                    markss = [[]]
                    for suc in mark[1]:
                        # cardinal product with previous successors
                        productresult = []
                        for prefix in markss:
                            for suffix in cache[suc]:
                                newss = list(prefix)
                                for part in suffix:
                                    # do not duplicated entry in successors set
                                    # first entry wins.
                                    if part not in newss:
                                        newss.append(part)
                                productresult.append(newss)
                        markss = productresult
                    succssets.extend(markss)
                # remove duplicated and subset
                # (largest sets first so subsets can be discarded in one pass)
                seen = []
                final = []
                candidate = sorted(((set(s), s) for s in succssets if s),
                                   key=lambda x: len(x[1]), reverse=True)
                for setversion, listversion in candidate:
                    for seenset in seen:
                        if setversion.issubset(seenset):
                            break
                    else:
                        final.append(listversion)
                        seen.append(setversion)
                final.reverse() # put small successors set first
                cache[current] = final
    return cache[initialnode]
1110 1235
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}

def cachefor(name):
    """Decorator to register a function as computing the cache for a set"""
    def register(func):
        # each set name may only be registered once
        assert name not in cachefuncs
        cachefuncs[name] = func
        return func
    return register
1120 1245
1121 1246 def getrevs(repo, name):
1122 1247 """Return the set of revision that belong to the <name> set
1123 1248
1124 1249 Such access may compute the set and cache it for future use"""
1125 1250 repo = repo.unfiltered()
1126 1251 if not repo.obsstore:
1127 1252 return frozenset()
1128 1253 if name not in repo.obsstore.caches:
1129 1254 repo.obsstore.caches[name] = cachefuncs[name](repo)
1130 1255 return repo.obsstore.caches[name]
1131 1256
1132 1257 # To be simple we need to invalidate obsolescence cache when:
1133 1258 #
1134 1259 # - new changeset is added:
1135 1260 # - public phase is changed
1136 1261 # - obsolescence marker are added
1137 1262 # - strip is used a repo
1138 1263 def clearobscaches(repo):
1139 1264 """Remove all obsolescence related cache from a repo
1140 1265
1141 1266 This remove all cache in obsstore is the obsstore already exist on the
1142 1267 repo.
1143 1268
1144 1269 (We could be smarter here given the exact event that trigger the cache
1145 1270 clearing)"""
1146 1271 # only clear cache is there is obsstore data in this repo
1147 1272 if 'obsstore' in repo._filecache:
1148 1273 repo.obsstore.caches.clear()
1149 1274
1150 1275 @cachefor('obsolete')
1151 1276 def _computeobsoleteset(repo):
1152 1277 """the set of obsolete revisions"""
1153 1278 obs = set()
1154 1279 getnode = repo.changelog.node
1155 1280 notpublic = repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
1156 1281 for r in notpublic:
1157 1282 if getnode(r) in repo.obsstore.successors:
1158 1283 obs.add(r)
1159 1284 return obs
1160 1285
1161 1286 @cachefor('unstable')
1162 1287 def _computeunstableset(repo):
1163 1288 """the set of non obsolete revisions with obsolete parents"""
1164 1289 revs = [(ctx.rev(), ctx) for ctx in
1165 1290 repo.set('(not public()) and (not obsolete())')]
1166 1291 revs.sort(key=lambda x:x[0])
1167 1292 unstable = set()
1168 1293 for rev, ctx in revs:
1169 1294 # A rev is unstable if one of its parent is obsolete or unstable
1170 1295 # this works since we traverse following growing rev order
1171 1296 if any((x.obsolete() or (x.rev() in unstable))
1172 1297 for x in ctx.parents()):
1173 1298 unstable.add(rev)
1174 1299 return unstable
1175 1300
1176 1301 @cachefor('suspended')
1177 1302 def _computesuspendedset(repo):
1178 1303 """the set of obsolete parents with non obsolete descendants"""
1179 1304 suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
1180 1305 return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
1181 1306
1182 1307 @cachefor('extinct')
1183 1308 def _computeextinctset(repo):
1184 1309 """the set of obsolete parents without non obsolete descendants"""
1185 1310 return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
1186 1311
1187 1312
1188 1313 @cachefor('bumped')
1189 1314 def _computebumpedset(repo):
1190 1315 """the set of revs trying to obsolete public revisions"""
1191 1316 bumped = set()
1192 1317 # util function (avoid attribute lookup in the loop)
1193 1318 phase = repo._phasecache.phase # would be faster to grab the full list
1194 1319 public = phases.public
1195 1320 cl = repo.changelog
1196 1321 torev = cl.nodemap.get
1197 1322 for ctx in repo.set('(not public()) and (not obsolete())'):
1198 1323 rev = ctx.rev()
1199 1324 # We only evaluate mutable, non-obsolete revision
1200 1325 node = ctx.node()
1201 1326 # (future) A cache of precursors may worth if split is very common
1202 1327 for pnode in allprecursors(repo.obsstore, [node],
1203 1328 ignoreflags=bumpedfix):
1204 1329 prev = torev(pnode) # unfiltered! but so is phasecache
1205 1330 if (prev is not None) and (phase(repo, prev) <= public):
1206 1331 # we have a public precursor
1207 1332 bumped.add(rev)
1208 1333 break # Next draft!
1209 1334 return bumped
1210 1335
1211 1336 @cachefor('divergent')
1212 1337 def _computedivergentset(repo):
1213 1338 """the set of rev that compete to be the final successors of some revision.
1214 1339 """
1215 1340 divergent = set()
1216 1341 obsstore = repo.obsstore
1217 1342 newermap = {}
1218 1343 for ctx in repo.set('(not public()) - obsolete()'):
1219 1344 mark = obsstore.precursors.get(ctx.node(), ())
1220 1345 toprocess = set(mark)
1221 1346 seen = set()
1222 1347 while toprocess:
1223 1348 prec = toprocess.pop()[0]
1224 1349 if prec in seen:
1225 1350 continue # emergency cycle hanging prevention
1226 1351 seen.add(prec)
1227 1352 if prec not in newermap:
1228 1353 successorssets(repo, prec, newermap)
1229 1354 newer = [n for n in newermap[prec] if n]
1230 1355 if len(newer) > 1:
1231 1356 divergent.add(ctx.rev())
1232 1357 break
1233 1358 toprocess.update(obsstore.precursors.get(prec, ()))
1234 1359 return divergent
1235 1360
1236 1361
1237 1362 def createmarkers(repo, relations, flag=0, date=None, metadata=None,
1238 1363 operation=None):
1239 1364 """Add obsolete markers between changesets in a repo
1240 1365
1241 1366 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
1242 1367 tuple. `old` and `news` are changectx. metadata is an optional dictionary
1243 1368 containing metadata for this marker only. It is merged with the global
1244 1369 metadata specified through the `metadata` argument of this function,
1245 1370
1246 1371 Trying to obsolete a public changeset will raise an exception.
1247 1372
1248 1373 Current user and date are used except if specified otherwise in the
1249 1374 metadata attribute.
1250 1375
1251 1376 This function operates within a transaction of its own, but does
1252 1377 not take any lock on the repo.
1253 1378 """
1254 1379 # prepare metadata
1255 1380 if metadata is None:
1256 1381 metadata = {}
1257 1382 if 'user' not in metadata:
1258 1383 metadata['user'] = repo.ui.username()
1259 1384 useoperation = repo.ui.configbool('experimental',
1260 1385 'evolution.track-operation',
1261 1386 False)
1262 1387 if useoperation and operation:
1263 1388 metadata['operation'] = operation
1264 1389 tr = repo.transaction('add-obsolescence-marker')
1265 1390 try:
1266 1391 markerargs = []
1267 1392 for rel in relations:
1268 1393 prec = rel[0]
1269 1394 sucs = rel[1]
1270 1395 localmetadata = metadata.copy()
1271 1396 if 2 < len(rel):
1272 1397 localmetadata.update(rel[2])
1273 1398
1274 1399 if not prec.mutable():
1275 1400 raise error.Abort(_("cannot obsolete public changeset: %s")
1276 1401 % prec,
1277 1402 hint="see 'hg help phases' for details")
1278 1403 nprec = prec.node()
1279 1404 nsucs = tuple(s.node() for s in sucs)
1280 1405 npare = None
1281 1406 if not nsucs:
1282 1407 npare = tuple(p.node() for p in prec.parents())
1283 1408 if nprec in nsucs:
1284 1409 raise error.Abort(_("changeset %s cannot obsolete itself")
1285 1410 % prec)
1286 1411
1287 1412 # Creating the marker causes the hidden cache to become invalid,
1288 1413 # which causes recomputation when we ask for prec.parents() above.
1289 1414 # Resulting in n^2 behavior. So let's prepare all of the args
1290 1415 # first, then create the markers.
1291 1416 markerargs.append((nprec, nsucs, npare, localmetadata))
1292 1417
1293 1418 for args in markerargs:
1294 1419 nprec, nsucs, npare, localmetadata = args
1295 1420 repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
1296 1421 date=date, metadata=localmetadata,
1297 1422 ui=repo.ui)
1298 1423 repo.filteredrevcache.clear()
1299 1424 tr.close()
1300 1425 finally:
1301 1426 tr.release()
@@ -1,381 +1,381 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 add
4 4 addremove
5 5 annotate
6 6 archive
7 7 backout
8 8 bisect
9 9 bookmarks
10 10 branch
11 11 branches
12 12 bundle
13 13 cat
14 14 clone
15 15 commit
16 16 config
17 17 copy
18 18 diff
19 19 export
20 20 files
21 21 forget
22 22 graft
23 23 grep
24 24 heads
25 25 help
26 26 identify
27 27 import
28 28 incoming
29 29 init
30 30 locate
31 31 log
32 32 manifest
33 33 merge
34 34 outgoing
35 35 parents
36 36 paths
37 37 phase
38 38 pull
39 39 push
40 40 recover
41 41 remove
42 42 rename
43 43 resolve
44 44 revert
45 45 rollback
46 46 root
47 47 serve
48 48 status
49 49 summary
50 50 tag
51 51 tags
52 52 tip
53 53 unbundle
54 54 update
55 55 verify
56 56 version
57 57
58 58 Show all commands that start with "a"
59 59 $ hg debugcomplete a
60 60 add
61 61 addremove
62 62 annotate
63 63 archive
64 64
65 65 Do not show debug commands if there are other candidates
66 66 $ hg debugcomplete d
67 67 diff
68 68
69 69 Show debug commands if there are no other candidates
70 70 $ hg debugcomplete debug
71 71 debugancestor
72 72 debugapplystreamclonebundle
73 73 debugbuilddag
74 74 debugbundle
75 75 debugcheckstate
76 76 debugcolor
77 77 debugcommands
78 78 debugcomplete
79 79 debugconfig
80 80 debugcreatestreamclonebundle
81 81 debugdag
82 82 debugdata
83 83 debugdate
84 84 debugdeltachain
85 85 debugdirstate
86 86 debugdiscovery
87 87 debugextensions
88 88 debugfileset
89 89 debugfsinfo
90 90 debuggetbundle
91 91 debugignore
92 92 debugindex
93 93 debugindexdot
94 94 debuginstall
95 95 debugknown
96 96 debuglabelcomplete
97 97 debuglocks
98 98 debugmergestate
99 99 debugnamecomplete
100 100 debugobsolete
101 101 debugpathcomplete
102 102 debugpickmergetool
103 103 debugpushkey
104 104 debugpvec
105 105 debugrebuilddirstate
106 106 debugrebuildfncache
107 107 debugrename
108 108 debugrevlog
109 109 debugrevspec
110 110 debugsetparents
111 111 debugsub
112 112 debugsuccessorssets
113 113 debugtemplate
114 114 debugupdatecaches
115 115 debugupgraderepo
116 116 debugwalk
117 117 debugwireargs
118 118
119 119 Do not show the alias of a debug command if there are other candidates
120 120 (this should hide rawcommit)
121 121 $ hg debugcomplete r
122 122 recover
123 123 remove
124 124 rename
125 125 resolve
126 126 revert
127 127 rollback
128 128 root
129 129 Show the alias of a debug command if there are no other candidates
130 130 $ hg debugcomplete rawc
131 131
132 132
133 133 Show the global options
134 134 $ hg debugcomplete --options | sort
135 135 --color
136 136 --config
137 137 --cwd
138 138 --debug
139 139 --debugger
140 140 --encoding
141 141 --encodingmode
142 142 --help
143 143 --hidden
144 144 --noninteractive
145 145 --pager
146 146 --profile
147 147 --quiet
148 148 --repository
149 149 --time
150 150 --traceback
151 151 --verbose
152 152 --version
153 153 -R
154 154 -h
155 155 -q
156 156 -v
157 157 -y
158 158
159 159 Show the options for the "serve" command
160 160 $ hg debugcomplete --options serve | sort
161 161 --accesslog
162 162 --address
163 163 --certificate
164 164 --cmdserver
165 165 --color
166 166 --config
167 167 --cwd
168 168 --daemon
169 169 --daemon-postexec
170 170 --debug
171 171 --debugger
172 172 --encoding
173 173 --encodingmode
174 174 --errorlog
175 175 --help
176 176 --hidden
177 177 --ipv6
178 178 --name
179 179 --noninteractive
180 180 --pager
181 181 --pid-file
182 182 --port
183 183 --prefix
184 184 --profile
185 185 --quiet
186 186 --repository
187 187 --stdio
188 188 --style
189 189 --subrepos
190 190 --templates
191 191 --time
192 192 --traceback
193 193 --verbose
194 194 --version
195 195 --web-conf
196 196 -6
197 197 -A
198 198 -E
199 199 -R
200 200 -S
201 201 -a
202 202 -d
203 203 -h
204 204 -n
205 205 -p
206 206 -q
207 207 -t
208 208 -v
209 209 -y
210 210
211 211 Show an error if we use --options with an ambiguous abbreviation
212 212 $ hg debugcomplete --options s
213 213 hg: command 's' is ambiguous:
214 214 serve showconfig status summary
215 215 [255]
216 216
217 217 Show all commands + options
218 218 $ hg debugcommands
219 219 add: include, exclude, subrepos, dry-run
220 220 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
221 221 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
222 222 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
223 223 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
224 224 export: output, switch-parent, rev, text, git, binary, nodates
225 225 forget: include, exclude
226 226 init: ssh, remotecmd, insecure
227 227 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
228 228 merge: force, rev, preview, tool
229 229 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
230 230 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
231 231 remove: after, force, subrepos, include, exclude
232 232 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
233 233 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
234 234 summary: remote
235 235 update: clean, check, merge, date, rev, tool
236 236 addremove: similarity, subrepos, include, exclude, dry-run
237 237 archive: no-decode, prefix, rev, type, subrepos, include, exclude
238 238 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
239 239 bisect: reset, good, bad, skip, extend, command, noupdate
240 240 bookmarks: force, rev, delete, rename, inactive, template
241 241 branch: force, clean
242 242 branches: active, closed, template
243 243 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
244 244 cat: output, rev, decode, include, exclude, template
245 245 config: untrusted, edit, local, global, template
246 246 copy: after, force, include, exclude, dry-run
247 247 debugancestor:
248 248 debugapplystreamclonebundle:
249 249 debugbuilddag: mergeable-file, overwritten-file, new-file
250 250 debugbundle: all, spec
251 251 debugcheckstate:
252 252 debugcolor: style
253 253 debugcommands:
254 254 debugcomplete: options
255 255 debugcreatestreamclonebundle:
256 256 debugdag: tags, branches, dots, spaces
257 257 debugdata: changelog, manifest, dir
258 258 debugdate: extended
259 259 debugdeltachain: changelog, manifest, dir, template
260 260 debugdirstate: nodates, datesort
261 261 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
262 262 debugextensions: template
263 263 debugfileset: rev
264 264 debugfsinfo:
265 265 debuggetbundle: head, common, type
266 266 debugignore:
267 267 debugindex: changelog, manifest, dir, format
268 268 debugindexdot: changelog, manifest, dir
269 269 debuginstall: template
270 270 debugknown:
271 271 debuglabelcomplete:
272 272 debuglocks: force-lock, force-wlock
273 273 debugmergestate:
274 274 debugnamecomplete:
275 debugobsolete: flags, record-parents, rev, index, delete, date, user, template
275 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
276 276 debugpathcomplete: full, normal, added, removed
277 277 debugpickmergetool: rev, changedelete, include, exclude, tool
278 278 debugpushkey:
279 279 debugpvec:
280 280 debugrebuilddirstate: rev, minimal
281 281 debugrebuildfncache:
282 282 debugrename: rev
283 283 debugrevlog: changelog, manifest, dir, dump
284 284 debugrevspec: optimize, show-stage, no-optimized, verify-optimized
285 285 debugsetparents:
286 286 debugsub: rev
287 287 debugsuccessorssets:
288 288 debugtemplate: rev, define
289 289 debugupdatecaches:
290 290 debugupgraderepo: optimize, run
291 291 debugwalk: include, exclude
292 292 debugwireargs: three, four, five, ssh, remotecmd, insecure
293 293 files: rev, print0, include, exclude, template, subrepos
294 294 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
295 295 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
296 296 heads: rev, topo, active, closed, style, template
297 297 help: extension, command, keyword, system
298 298 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure
299 299 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
300 300 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
301 301 locate: rev, print0, fullpath, include, exclude
302 302 manifest: rev, all, template
303 303 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
304 304 parents: rev, style, template
305 305 paths: template
306 306 phase: public, draft, secret, force, rev
307 307 recover:
308 308 rename: after, force, include, exclude, dry-run
309 309 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
310 310 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
311 311 rollback: dry-run, force
312 312 root:
313 313 tag: force, local, rev, remove, edit, message, date, user
314 314 tags: template
315 315 tip: patch, git, style, template
316 316 unbundle: update
317 317 verify:
318 318 version: template
319 319
320 320 $ hg init a
321 321 $ cd a
322 322 $ echo fee > fee
323 323 $ hg ci -q -Amfee
324 324 $ hg tag fee
325 325 $ mkdir fie
326 326 $ echo dead > fie/dead
327 327 $ echo live > fie/live
328 328 $ hg bookmark fo
329 329 $ hg branch -q fie
330 330 $ hg ci -q -Amfie
331 331 $ echo fo > fo
332 332 $ hg branch -qf default
333 333 $ hg ci -q -Amfo
334 334 $ echo Fum > Fum
335 335 $ hg ci -q -AmFum
336 336 $ hg bookmark Fum
337 337
338 338 Test debugpathcomplete
339 339
340 340 $ hg debugpathcomplete f
341 341 fee
342 342 fie
343 343 fo
344 344 $ hg debugpathcomplete -f f
345 345 fee
346 346 fie/dead
347 347 fie/live
348 348 fo
349 349
350 350 $ hg rm Fum
351 351 $ hg debugpathcomplete -r F
352 352 Fum
353 353
354 354 Test debugnamecomplete
355 355
356 356 $ hg debugnamecomplete
357 357 Fum
358 358 default
359 359 fee
360 360 fie
361 361 fo
362 362 tip
363 363 $ hg debugnamecomplete f
364 364 fee
365 365 fie
366 366 fo
367 367
368 368 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
369 369 used for completions in some shells.
370 370
371 371 $ hg debuglabelcomplete
372 372 Fum
373 373 default
374 374 fee
375 375 fie
376 376 fo
377 377 tip
378 378 $ hg debuglabelcomplete f
379 379 fee
380 380 fie
381 381 fo
@@ -1,1307 +1,1343 b''
1 1 $ cat >> $HGRCPATH << EOF
2 2 > [phases]
3 3 > # public changeset are not obsolete
4 4 > publish=false
5 5 > [ui]
6 6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(troubles, ' {troubles}')}) [{tags} {bookmarks}] {desc|firstline}\n"
7 7 > EOF
8 8 $ mkcommit() {
9 9 > echo "$1" > "$1"
10 10 > hg add "$1"
11 11 > hg ci -m "add $1"
12 12 > }
13 13 $ getid() {
14 14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 15 > }
16 16
17 17 $ cat > debugkeys.py <<EOF
18 18 > def reposetup(ui, repo):
19 19 > class debugkeysrepo(repo.__class__):
20 20 > def listkeys(self, namespace):
21 21 > ui.write('listkeys %s\n' % (namespace,))
22 22 > return super(debugkeysrepo, self).listkeys(namespace)
23 23 >
24 24 > if repo.local():
25 25 > repo.__class__ = debugkeysrepo
26 26 > EOF
27 27
28 28 $ hg init tmpa
29 29 $ cd tmpa
30 30 $ mkcommit kill_me
31 31
32 32 Checking that the feature is properly disabled
33 33
34 34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 35 abort: creating obsolete markers is not enabled on this repo
36 36 [255]
37 37
38 38 Enabling it
39 39
40 40 $ cat >> $HGRCPATH << EOF
41 41 > [experimental]
42 42 > evolution=createmarkers,exchange
43 43 > EOF
44 44
45 45 Killing a single changeset without replacement
46 46
47 47 $ hg debugobsolete 0
48 48 abort: changeset references must be full hexadecimal node identifiers
49 49 [255]
50 50 $ hg debugobsolete '00'
51 51 abort: changeset references must be full hexadecimal node identifiers
52 52 [255]
53 53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
54 54 $ hg debugobsolete
55 55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
56 56
57 57 (test that mercurial is not confused)
58 58
59 59 $ hg up null --quiet # having 0 as parent prevents it to be hidden
60 60 $ hg tip
61 61 -1:000000000000 (public) [tip ]
62 62 $ hg up --hidden tip --quiet
63 63
64 64 Killing a single changeset with itself should fail
65 65 (simple local safeguard)
66 66
67 67 $ hg debugobsolete `getid kill_me` `getid kill_me`
68 68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
69 69 [255]
70 70
71 71 $ cd ..
72 72
73 73 Killing a single changeset with replacement
74 74 (and testing the format option)
75 75
76 76 $ hg init tmpb
77 77 $ cd tmpb
78 78 $ mkcommit a
79 79 $ mkcommit b
80 80 $ mkcommit original_c
81 81 $ hg up "desc('b')"
82 82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 83 $ mkcommit new_c
84 84 created new head
85 85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
86 86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
87 87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
88 88 2:245bde4270cd add original_c
89 89 $ hg debugrevlog -cd
90 90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
91 91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
92 92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
93 93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
94 94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
95 95 $ hg debugobsolete
96 96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
97 97
98 98 (check for version number of the obsstore)
99 99
100 100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
101 101 \x00 (no-eol) (esc)
102 102
103 103 do it again (it read the obsstore before adding new changeset)
104 104
105 105 $ hg up '.^'
106 106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 107 $ mkcommit new_2_c
108 108 created new head
109 109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
110 110 $ hg debugobsolete
111 111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
112 112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
113 113
114 114 Register two markers with a missing node
115 115
116 116 $ hg up '.^'
117 117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
118 118 $ mkcommit new_3_c
119 119 created new head
120 120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
121 121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
122 122 $ hg debugobsolete
123 123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
124 124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
125 125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
126 126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
127 127
128 128 Test the --index option of debugobsolete command
129 129 $ hg debugobsolete --index
130 130 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
131 131 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
132 132 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
133 133 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
134 134
135 135 Refuse pathological nullid successors
136 136 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
137 137 transaction abort!
138 138 rollback completed
139 139 abort: bad obsolescence marker detected: invalid successors nullid
140 140 [255]
141 141
142 142 Check that graphlog detect that a changeset is obsolete:
143 143
144 144 $ hg log -G
145 145 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
146 146 |
147 147 o 1:7c3bad9141dc (draft) [ ] add b
148 148 |
149 149 o 0:1f0dee641bb7 (draft) [ ] add a
150 150
151 151
152 152 check that heads does not report them
153 153
154 154 $ hg heads
155 155 5:5601fb93a350 (draft) [tip ] add new_3_c
156 156 $ hg heads --hidden
157 157 5:5601fb93a350 (draft) [tip ] add new_3_c
158 158 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
159 159 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
160 160 2:245bde4270cd (draft *obsolete*) [ ] add original_c
161 161
162 162
163 163 check that summary does not report them
164 164
165 165 $ hg init ../sink
166 166 $ echo '[paths]' >> .hg/hgrc
167 167 $ echo 'default=../sink' >> .hg/hgrc
168 168 $ hg summary --remote
169 169 parent: 5:5601fb93a350 tip
170 170 add new_3_c
171 171 branch: default
172 172 commit: (clean)
173 173 update: (current)
174 174 phases: 3 draft
175 175 remote: 3 outgoing
176 176
177 177 $ hg summary --remote --hidden
178 178 parent: 5:5601fb93a350 tip
179 179 add new_3_c
180 180 branch: default
181 181 commit: (clean)
182 182 update: 3 new changesets, 4 branch heads (merge)
183 183 phases: 6 draft
184 184 remote: 3 outgoing
185 185
186 186 check that various commands work well with filtering
187 187
188 188 $ hg tip
189 189 5:5601fb93a350 (draft) [tip ] add new_3_c
190 190 $ hg log -r 6
191 191 abort: unknown revision '6'!
192 192 [255]
193 193 $ hg log -r 4
194 194 abort: hidden revision '4'!
195 195 (use --hidden to access hidden revisions)
196 196 [255]
197 197 $ hg debugrevspec 'rev(6)'
198 198 $ hg debugrevspec 'rev(4)'
199 199 $ hg debugrevspec 'null'
200 200 -1
201 201
202 202 Check that public changeset are not accounted as obsolete:
203 203
204 204 $ hg --hidden phase --public 2
205 205 $ hg log -G
206 206 @ 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
207 207 |
208 208 | o 2:245bde4270cd (public) [ ] add original_c
209 209 |/
210 210 o 1:7c3bad9141dc (public) [ ] add b
211 211 |
212 212 o 0:1f0dee641bb7 (public) [ ] add a
213 213
214 214
215 215 And that bumped changeset are detected
216 216 --------------------------------------
217 217
218 218 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
219 219 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
220 220 the public changeset
221 221
222 222 $ hg log --hidden -r 'bumped()'
223 223 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
224 224
225 225 And that we can't push bumped changeset
226 226
227 227 $ hg push ../tmpa -r 0 --force #(make repo related)
228 228 pushing to ../tmpa
229 229 searching for changes
230 230 warning: repository is unrelated
231 231 adding changesets
232 232 adding manifests
233 233 adding file changes
234 234 added 1 changesets with 1 changes to 1 files (+1 heads)
235 235 $ hg push ../tmpa
236 236 pushing to ../tmpa
237 237 searching for changes
238 238 abort: push includes bumped changeset: 5601fb93a350!
239 239 [255]
240 240
241 241 Fixing "bumped" situation
242 242 We need to create a clone of 5 and add a special marker with a flag
243 243
244 244 $ hg summary
245 245 parent: 5:5601fb93a350 tip (bumped)
246 246 add new_3_c
247 247 branch: default
248 248 commit: (clean)
249 249 update: 1 new changesets, 2 branch heads (merge)
250 250 phases: 1 draft
251 251 bumped: 1 changesets
252 252 $ hg up '5^'
253 253 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
254 254 $ hg revert -ar 5
255 255 adding new_3_c
256 256 $ hg ci -m 'add n3w_3_c'
257 257 created new head
258 258 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
259 259 $ hg log -r 'bumped()'
260 260 $ hg log -G
261 261 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
262 262 |
263 263 | o 2:245bde4270cd (public) [ ] add original_c
264 264 |/
265 265 o 1:7c3bad9141dc (public) [ ] add b
266 266 |
267 267 o 0:1f0dee641bb7 (public) [ ] add a
268 268
269 269
270 Basic exclusive testing
271
272 $ hg log -G --hidden
273 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
274 |
275 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
276 |/
277 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
278 |/
279 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
280 |/
281 | o 2:245bde4270cd (public) [ ] add original_c
282 |/
283 o 1:7c3bad9141dc (public) [ ] add b
284 |
285 o 0:1f0dee641bb7 (public) [ ] add a
286
287 $ hg debugobsolete --rev 6f9641995072
288 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
289 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
290 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
291 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
292 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
293 $ hg debugobsolete --rev 6f9641995072 --exclusive
294 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
295 $ hg debugobsolete --rev 5601fb93a350 --hidden
296 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
297 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
298 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
299 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
300 $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
301 $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
302 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
303 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
304 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
305
270 306 $ cd ..
271 307
272 308 Revision 0 is hidden
273 309 --------------------
274 310
275 311 $ hg init rev0hidden
276 312 $ cd rev0hidden
277 313
278 314 $ mkcommit kill0
279 315 $ hg up -q null
280 316 $ hg debugobsolete `getid kill0`
281 317 $ mkcommit a
282 318 $ mkcommit b
283 319
284 320 Should pick the first visible revision as "repo" node
285 321
286 322 $ hg archive ../archive-null
287 323 $ cat ../archive-null/.hg_archival.txt
288 324 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
289 325 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
290 326 branch: default
291 327 latesttag: null
292 328 latesttagdistance: 2
293 329 changessincelatesttag: 2
294 330
295 331
296 332 $ cd ..
297 333
298 334 Exchange Test
299 335 ============================
300 336
301 337 Destination repo does not have any data
302 338 ---------------------------------------
303 339
304 340 Simple incoming test
305 341
306 342 $ hg init tmpc
307 343 $ cd tmpc
308 344 $ hg incoming ../tmpb
309 345 comparing with ../tmpb
310 346 0:1f0dee641bb7 (public) [ ] add a
311 347 1:7c3bad9141dc (public) [ ] add b
312 348 2:245bde4270cd (public) [ ] add original_c
313 349 6:6f9641995072 (draft) [tip ] add n3w_3_c
314 350
315 351 Try to pull markers
316 352 (extinct changesets are excluded but markers are pushed)
317 353
318 354 $ hg pull ../tmpb
319 355 pulling from ../tmpb
320 356 requesting all changes
321 357 adding changesets
322 358 adding manifests
323 359 adding file changes
324 360 added 4 changesets with 4 changes to 4 files (+1 heads)
325 361 5 new obsolescence markers
326 362 (run 'hg heads' to see heads, 'hg merge' to merge)
327 363 $ hg debugobsolete
328 364 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
329 365 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
330 366 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
331 367 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
332 368 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
333 369
334 370 Rollback//Transaction support
335 371
336 372 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
337 373 $ hg debugobsolete
338 374 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
339 375 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
340 376 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
341 377 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
342 378 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
343 379 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
344 380 $ hg rollback -n
345 381 repository tip rolled back to revision 3 (undo debugobsolete)
346 382 $ hg rollback
347 383 repository tip rolled back to revision 3 (undo debugobsolete)
348 384 $ hg debugobsolete
349 385 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
350 386 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
351 387 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
352 388 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
353 389 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
354 390
355 391 $ cd ..
356 392
357 393 Try to push markers
358 394
359 395 $ hg init tmpd
360 396 $ hg -R tmpb push tmpd
361 397 pushing to tmpd
362 398 searching for changes
363 399 adding changesets
364 400 adding manifests
365 401 adding file changes
366 402 added 4 changesets with 4 changes to 4 files (+1 heads)
367 403 5 new obsolescence markers
368 404 $ hg -R tmpd debugobsolete | sort
369 405 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
370 406 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
371 407 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
372 408 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
373 409 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
374 410
375 411 Check obsolete keys are exchanged only if source has an obsolete store
376 412
377 413 $ hg init empty
378 414 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
379 415 pushing to tmpd
380 416 listkeys phases
381 417 listkeys bookmarks
382 418 no changes found
383 419 listkeys phases
384 420 [1]
385 421
386 422 clone support
387 423 (markers are copied and extinct changesets are included to allow hardlinks)
388 424
389 425 $ hg clone tmpb clone-dest
390 426 updating to branch default
391 427 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
392 428 $ hg -R clone-dest log -G --hidden
393 429 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
394 430 |
395 431 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
396 432 |/
397 433 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
398 434 |/
399 435 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
400 436 |/
401 437 | o 2:245bde4270cd (public) [ ] add original_c
402 438 |/
403 439 o 1:7c3bad9141dc (public) [ ] add b
404 440 |
405 441 o 0:1f0dee641bb7 (public) [ ] add a
406 442
407 443 $ hg -R clone-dest debugobsolete
408 444 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
409 445 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
410 446 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
411 447 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
412 448 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
413 449
414 450
415 451 Destination repo have existing data
416 452 ---------------------------------------
417 453
418 454 On pull
419 455
420 456 $ hg init tmpe
421 457 $ cd tmpe
422 458 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
423 459 $ hg pull ../tmpb
424 460 pulling from ../tmpb
425 461 requesting all changes
426 462 adding changesets
427 463 adding manifests
428 464 adding file changes
429 465 added 4 changesets with 4 changes to 4 files (+1 heads)
430 466 5 new obsolescence markers
431 467 (run 'hg heads' to see heads, 'hg merge' to merge)
432 468 $ hg debugobsolete
433 469 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
434 470 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
435 471 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
436 472 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
437 473 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
438 474 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
439 475
440 476
441 477 On push
442 478
443 479 $ hg push ../tmpc
444 480 pushing to ../tmpc
445 481 searching for changes
446 482 no changes found
447 483 1 new obsolescence markers
448 484 [1]
449 485 $ hg -R ../tmpc debugobsolete
450 486 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
451 487 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
452 488 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
453 489 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
454 490 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
455 491 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
456 492
457 493 detect outgoing obsolete and unstable
458 494 ---------------------------------------
459 495
460 496
461 497 $ hg log -G
462 498 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
463 499 |
464 500 | o 2:245bde4270cd (public) [ ] add original_c
465 501 |/
466 502 o 1:7c3bad9141dc (public) [ ] add b
467 503 |
468 504 o 0:1f0dee641bb7 (public) [ ] add a
469 505
470 506 $ hg up 'desc("n3w_3_c")'
471 507 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
472 508 $ mkcommit original_d
473 509 $ mkcommit original_e
474 510 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
475 511 $ hg debugobsolete | grep `getid original_d`
476 512 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
477 513 $ hg log -r 'obsolete()'
478 514 4:94b33453f93b (draft *obsolete*) [ ] add original_d
479 515 $ hg summary
480 516 parent: 5:cda648ca50f5 tip (unstable)
481 517 add original_e
482 518 branch: default
483 519 commit: (clean)
484 520 update: 1 new changesets, 2 branch heads (merge)
485 521 phases: 3 draft
486 522 unstable: 1 changesets
487 523 $ hg log -G -r '::unstable()'
488 524 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
489 525 |
490 526 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
491 527 |
492 528 o 3:6f9641995072 (draft) [ ] add n3w_3_c
493 529 |
494 530 o 1:7c3bad9141dc (public) [ ] add b
495 531 |
496 532 o 0:1f0dee641bb7 (public) [ ] add a
497 533
498 534
499 535 refuse to push obsolete changeset
500 536
501 537 $ hg push ../tmpc/ -r 'desc("original_d")'
502 538 pushing to ../tmpc/
503 539 searching for changes
504 540 abort: push includes obsolete changeset: 94b33453f93b!
505 541 [255]
506 542
507 543 refuse to push unstable changeset
508 544
509 545 $ hg push ../tmpc/
510 546 pushing to ../tmpc/
511 547 searching for changes
512 548 abort: push includes unstable changeset: cda648ca50f5!
513 549 [255]
514 550
515 551 Test that extinct changeset are properly detected
516 552
517 553 $ hg log -r 'extinct()'
518 554
519 555 Don't try to push extinct changeset
520 556
521 557 $ hg init ../tmpf
522 558 $ hg out ../tmpf
523 559 comparing with ../tmpf
524 560 searching for changes
525 561 0:1f0dee641bb7 (public) [ ] add a
526 562 1:7c3bad9141dc (public) [ ] add b
527 563 2:245bde4270cd (public) [ ] add original_c
528 564 3:6f9641995072 (draft) [ ] add n3w_3_c
529 565 4:94b33453f93b (draft *obsolete*) [ ] add original_d
530 566 5:cda648ca50f5 (draft unstable) [tip ] add original_e
531 567 $ hg push ../tmpf -f # -f because we push unstable too
532 568 pushing to ../tmpf
533 569 searching for changes
534 570 adding changesets
535 571 adding manifests
536 572 adding file changes
537 573 added 6 changesets with 6 changes to 6 files (+1 heads)
538 574 7 new obsolescence markers
539 575
540 576 no warning displayed
541 577
542 578 $ hg push ../tmpf
543 579 pushing to ../tmpf
544 580 searching for changes
545 581 no changes found
546 582 [1]
547 583
548 584 Do not warn about new head when the new head is a successors of a remote one
549 585
550 586 $ hg log -G
551 587 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
552 588 |
553 589 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
554 590 |
555 591 o 3:6f9641995072 (draft) [ ] add n3w_3_c
556 592 |
557 593 | o 2:245bde4270cd (public) [ ] add original_c
558 594 |/
559 595 o 1:7c3bad9141dc (public) [ ] add b
560 596 |
561 597 o 0:1f0dee641bb7 (public) [ ] add a
562 598
563 599 $ hg up -q 'desc(n3w_3_c)'
564 600 $ mkcommit obsolete_e
565 601 created new head
566 602 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
567 603 $ hg outgoing ../tmpf # parasite hg outgoing testing
568 604 comparing with ../tmpf
569 605 searching for changes
570 606 6:3de5eca88c00 (draft) [tip ] add obsolete_e
571 607 $ hg push ../tmpf
572 608 pushing to ../tmpf
573 609 searching for changes
574 610 adding changesets
575 611 adding manifests
576 612 adding file changes
577 613 added 1 changesets with 1 changes to 1 files (+1 heads)
578 614 1 new obsolescence markers
579 615
580 616 test relevance computation
581 617 ---------------------------------------
582 618
583 619 Checking simple case of "marker relevance".
584 620
585 621
586 622 Reminder of the repo situation
587 623
588 624 $ hg log --hidden --graph
589 625 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
590 626 |
591 627 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e
592 628 | |
593 629 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
594 630 |/
595 631 o 3:6f9641995072 (draft) [ ] add n3w_3_c
596 632 |
597 633 | o 2:245bde4270cd (public) [ ] add original_c
598 634 |/
599 635 o 1:7c3bad9141dc (public) [ ] add b
600 636 |
601 637 o 0:1f0dee641bb7 (public) [ ] add a
602 638
603 639
604 640 List of all markers
605 641
606 642 $ hg debugobsolete
607 643 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
608 644 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
609 645 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
610 646 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
611 647 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
612 648 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
613 649 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
614 650 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
615 651
616 652 List of changesets with no chain
617 653
618 654 $ hg debugobsolete --hidden --rev ::2
619 655
620 656 List of changesets that are included on marker chain
621 657
622 658 $ hg debugobsolete --hidden --rev 6
623 659 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
624 660
625 661 List of changesets with a longer chain, (including a pruned children)
626 662
627 663 $ hg debugobsolete --hidden --rev 3
628 664 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
629 665 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
630 666 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
631 667 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
632 668 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
633 669 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
634 670 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
635 671
636 672 List of both
637 673
638 674 $ hg debugobsolete --hidden --rev 3::6
639 675 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
640 676 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
641 677 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
642 678 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
643 679 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
644 680 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
645 681 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
646 682 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
647 683
648 684 List of all markers in JSON
649 685
650 686 $ hg debugobsolete -Tjson
651 687 [
652 688 {
653 689 "date": [1339.0, 0],
654 690 "flag": 0,
655 691 "metadata": {"user": "test"},
656 692 "precnode": "1339133913391339133913391339133913391339",
657 693 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
658 694 },
659 695 {
660 696 "date": [1339.0, 0],
661 697 "flag": 0,
662 698 "metadata": {"user": "test"},
663 699 "precnode": "1337133713371337133713371337133713371337",
664 700 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
665 701 },
666 702 {
667 703 "date": [121.0, 120],
668 704 "flag": 12,
669 705 "metadata": {"user": "test"},
670 706 "precnode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
671 707 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
672 708 },
673 709 {
674 710 "date": [1338.0, 0],
675 711 "flag": 1,
676 712 "metadata": {"user": "test"},
677 713 "precnode": "5601fb93a350734d935195fee37f4054c529ff39",
678 714 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
679 715 },
680 716 {
681 717 "date": [1338.0, 0],
682 718 "flag": 0,
683 719 "metadata": {"user": "test"},
684 720 "precnode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
685 721 "succnodes": ["1337133713371337133713371337133713371337"]
686 722 },
687 723 {
688 724 "date": [1337.0, 0],
689 725 "flag": 0,
690 726 "metadata": {"user": "test"},
691 727 "precnode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
692 728 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
693 729 },
694 730 {
695 731 "date": [0.0, 0],
696 732 "flag": 0,
697 733 "metadata": {"user": "test"},
698 734 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
699 735 "precnode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
700 736 "succnodes": []
701 737 },
702 738 {
703 739 "date": *, (glob)
704 740 "flag": 0,
705 741 "metadata": {"user": "test"},
706 742 "precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
707 743 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
708 744 }
709 745 ]
710 746
711 747 Template keywords
712 748
713 749 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
714 750 3de5eca88c00 ????-??-?? (glob)
715 751 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
716 752 user=test
717 753 $ hg debugobsolete -r6 -T '{metadata}\n'
718 754 'user': 'test'
719 755 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
720 756 0 test
721 757
722 758 Test the debug output for exchange
723 759 ----------------------------------
724 760
725 761 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
726 762 pulling from ../tmpb
727 763 searching for changes
728 764 no changes found
729 765 obsmarker-exchange: 346 bytes received
730 766
731 767 check hgweb does not explode
732 768 ====================================
733 769
734 770 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
735 771 adding changesets
736 772 adding manifests
737 773 adding file changes
738 774 added 62 changesets with 63 changes to 9 files (+60 heads)
739 775 (run 'hg heads .' to see heads, 'hg merge' to merge)
740 776 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
741 777 > do
742 778 > hg debugobsolete $node
743 779 > done
744 780 $ hg up tip
745 781 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
746 782
747 783 #if serve
748 784
749 785 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
750 786 $ cat hg.pid >> $DAEMON_PIDS
751 787
752 788 check changelog view
753 789
754 790 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
755 791 200 Script output follows
756 792
757 793 check graph view
758 794
759 795 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
760 796 200 Script output follows
761 797
762 798 check filelog view
763 799
764 800 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
765 801 200 Script output follows
766 802
767 803 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
768 804 200 Script output follows
769 805 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
770 806 404 Not Found
771 807 [1]
772 808
773 809 check that the web.view config option takes effect:
774 810
775 811 $ killdaemons.py hg.pid
776 812 $ cat >> .hg/hgrc << EOF
777 813 > [web]
778 814 > view=all
779 815 > EOF
780 816 $ wait
781 817 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
782 818 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
783 819 200 Script output follows
784 820 $ killdaemons.py hg.pid
785 821
786 822 Checking _enable=False warning if obsolete marker exists
787 823
788 824 $ echo '[experimental]' >> $HGRCPATH
789 825 $ echo "evolution=" >> $HGRCPATH
790 826 $ hg log -r tip
791 827 obsolete feature not enabled but 68 markers found!
792 828 68:c15e9edfca13 (draft) [tip ] add celestine
793 829
794 830 reenable for later test
795 831
796 832 $ echo '[experimental]' >> $HGRCPATH
797 833 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
798 834
799 835 $ rm hg.pid access.log errors.log
800 836 #endif
801 837
802 838 Several troubles on the same changeset (create an unstable and bumped changeset)
803 839
804 840 $ hg debugobsolete `getid obsolete_e`
805 841 $ hg debugobsolete `getid original_c` `getid babar`
806 842 $ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
807 843 changeset: 7:50c51b361e60
808 844 user: test
809 845 date: Thu Jan 01 00:00:00 1970 +0000
810 846 trouble: unstable, bumped
811 847 summary: add babar
812 848
813 849
814 850 test the "obsolete" templatekw
815 851
816 852 $ hg log -r 'obsolete()'
817 853 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e
818 854
819 855 test the "troubles" templatekw
820 856
821 857 $ hg log -r 'bumped() and unstable()'
822 858 7:50c51b361e60 (draft unstable bumped) [ ] add babar
823 859
824 860 test the default cmdline template
825 861
826 862 $ hg log -T default -r 'bumped()'
827 863 changeset: 7:50c51b361e60
828 864 user: test
829 865 date: Thu Jan 01 00:00:00 1970 +0000
830 866 trouble: unstable, bumped
831 867 summary: add babar
832 868
833 869 $ hg log -T default -r 'obsolete()'
834 870 changeset: 6:3de5eca88c00
835 871 parent: 3:6f9641995072
836 872 user: test
837 873 date: Thu Jan 01 00:00:00 1970 +0000
838 874 summary: add obsolete_e
839 875
840 876
841 877 test summary output
842 878
843 879 $ hg up -r 'bumped() and unstable()'
844 880 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
845 881 $ hg summary
846 882 parent: 7:50c51b361e60 (unstable, bumped)
847 883 add babar
848 884 branch: default
849 885 commit: (clean)
850 886 update: 2 new changesets (update)
851 887 phases: 4 draft
852 888 unstable: 2 changesets
853 889 bumped: 1 changesets
854 890 $ hg up -r 'obsolete()'
855 891 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
856 892 $ hg summary
857 893 parent: 6:3de5eca88c00 (obsolete)
858 894 add obsolete_e
859 895 branch: default
860 896 commit: (clean)
861 897 update: 3 new changesets (update)
862 898 phases: 4 draft
863 899 unstable: 2 changesets
864 900 bumped: 1 changesets
865 901
866 902 Test incoming/outgoing with changesets obsoleted remotely, known locally
867 903 ===============================================================================
868 904
869 905 This tests issue 3805
870 906
871 907 $ hg init repo-issue3805
872 908 $ cd repo-issue3805
873 909 $ echo "base" > base
874 910 $ hg ci -Am "base"
875 911 adding base
876 912 $ echo "foo" > foo
877 913 $ hg ci -Am "A"
878 914 adding foo
879 915 $ hg clone . ../other-issue3805
880 916 updating to branch default
881 917 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
882 918 $ echo "bar" >> foo
883 919 $ hg ci --amend
884 920 $ cd ../other-issue3805
885 921 $ hg log -G
886 922 @ 1:29f0c6921ddd (draft) [tip ] A
887 923 |
888 924 o 0:d20a80d4def3 (draft) [ ] base
889 925
890 926 $ hg log -G -R ../repo-issue3805
891 927 @ 3:323a9c3ddd91 (draft) [tip ] A
892 928 |
893 929 o 0:d20a80d4def3 (draft) [ ] base
894 930
895 931 $ hg incoming
896 932 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
897 933 searching for changes
898 934 3:323a9c3ddd91 (draft) [tip ] A
899 935 $ hg incoming --bundle ../issue3805.hg
900 936 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
901 937 searching for changes
902 938 3:323a9c3ddd91 (draft) [tip ] A
903 939 $ hg outgoing
904 940 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
905 941 searching for changes
906 942 1:29f0c6921ddd (draft) [tip ] A
907 943
908 944 #if serve
909 945
910 946 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
911 947 $ cat hg.pid >> $DAEMON_PIDS
912 948
913 949 $ hg incoming http://localhost:$HGPORT
914 950 comparing with http://localhost:$HGPORT/
915 951 searching for changes
916 952 2:323a9c3ddd91 (draft) [tip ] A
917 953 $ hg outgoing http://localhost:$HGPORT
918 954 comparing with http://localhost:$HGPORT/
919 955 searching for changes
920 956 1:29f0c6921ddd (draft) [tip ] A
921 957
922 958 $ killdaemons.py
923 959
924 960 #endif
925 961
926 962 This tests issue 3814
927 963
928 964 (nothing to push but locally hidden changeset)
929 965
930 966 $ cd ..
931 967 $ hg init repo-issue3814
932 968 $ cd repo-issue3805
933 969 $ hg push -r 323a9c3ddd91 ../repo-issue3814
934 970 pushing to ../repo-issue3814
935 971 searching for changes
936 972 adding changesets
937 973 adding manifests
938 974 adding file changes
939 975 added 2 changesets with 2 changes to 2 files
940 976 2 new obsolescence markers
941 977 $ hg out ../repo-issue3814
942 978 comparing with ../repo-issue3814
943 979 searching for changes
944 980 no changes found
945 981 [1]
946 982
947 983 Test that a local tag blocks a changeset from being hidden
948 984
949 985 $ hg tag -l visible -r 1 --hidden
950 986 $ hg log -G
951 987 @ 3:323a9c3ddd91 (draft) [tip ] A
952 988 |
953 989 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A
954 990 |/
955 991 o 0:d20a80d4def3 (draft) [ ] base
956 992
957 993 Test that removing a local tag does not cause some commands to fail
958 994
959 995 $ hg tag -l -r tip tiptag
960 996 $ hg tags
961 997 tiptag 3:323a9c3ddd91
962 998 tip 3:323a9c3ddd91
963 999 visible 1:29f0c6921ddd
964 1000 $ hg --config extensions.strip= strip -r tip --no-backup
965 1001 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
966 1002 $ hg tags
967 1003 visible 1:29f0c6921ddd
968 1004 tip 1:29f0c6921ddd
969 1005
970 1006 Test bundle overlay onto hidden revision
971 1007
972 1008 $ cd ..
973 1009 $ hg init repo-bundleoverlay
974 1010 $ cd repo-bundleoverlay
975 1011 $ echo "A" > foo
976 1012 $ hg ci -Am "A"
977 1013 adding foo
978 1014 $ echo "B" >> foo
979 1015 $ hg ci -m "B"
980 1016 $ hg up 0
981 1017 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
982 1018 $ echo "C" >> foo
983 1019 $ hg ci -m "C"
984 1020 created new head
985 1021 $ hg log -G
986 1022 @ 2:c186d7714947 (draft) [tip ] C
987 1023 |
988 1024 | o 1:44526ebb0f98 (draft) [ ] B
989 1025 |/
990 1026 o 0:4b34ecfb0d56 (draft) [ ] A
991 1027
992 1028
993 1029 $ hg clone -r1 . ../other-bundleoverlay
994 1030 adding changesets
995 1031 adding manifests
996 1032 adding file changes
997 1033 added 2 changesets with 2 changes to 1 files
998 1034 updating to branch default
999 1035 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1000 1036 $ cd ../other-bundleoverlay
1001 1037 $ echo "B+" >> foo
1002 1038 $ hg ci --amend -m "B+"
1003 1039 $ hg log -G --hidden
1004 1040 @ 3:b7d587542d40 (draft) [tip ] B+
1005 1041 |
1006 1042 | x 2:eb95e9297e18 (draft *obsolete*) [ ] temporary amend commit for 44526ebb0f98
1007 1043 | |
1008 1044 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B
1009 1045 |/
1010 1046 o 0:4b34ecfb0d56 (draft) [ ] A
1011 1047
1012 1048
1013 1049 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1014 1050 comparing with ../repo-bundleoverlay
1015 1051 searching for changes
1016 1052 1:44526ebb0f98 (draft) [ ] B
1017 1053 2:c186d7714947 (draft) [tip ] C
1018 1054 $ hg log -G -R ../bundleoverlay.hg
1019 1055 o 4:c186d7714947 (draft) [tip ] C
1020 1056 |
1021 1057 | @ 3:b7d587542d40 (draft) [ ] B+
1022 1058 |/
1023 1059 o 0:4b34ecfb0d56 (draft) [ ] A
1024 1060
1025 1061
1026 1062 #if serve
1027 1063
1028 1064 Test issue 4506
1029 1065
1030 1066 $ cd ..
1031 1067 $ hg init repo-issue4506
1032 1068 $ cd repo-issue4506
1033 1069 $ echo "0" > foo
1034 1070 $ hg add foo
1035 1071 $ hg ci -m "content-0"
1036 1072
1037 1073 $ hg up null
1038 1074 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1039 1075 $ echo "1" > bar
1040 1076 $ hg add bar
1041 1077 $ hg ci -m "content-1"
1042 1078 created new head
1043 1079 $ hg up 0
1044 1080 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1045 1081 $ hg graft 1
1046 1082 grafting 1:1c9eddb02162 "content-1" (tip)
1047 1083
1048 1084 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1049 1085
1050 1086 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1051 1087 $ cat hg.pid >> $DAEMON_PIDS
1052 1088
1053 1089 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1054 1090 404 Not Found
1055 1091 [1]
1056 1092 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1057 1093 200 Script output follows
1058 1094 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1059 1095 200 Script output follows
1060 1096
1061 1097 $ killdaemons.py
1062 1098
1063 1099 #endif
1064 1100
1065 1101 Test heads computation on pending index changes with obsolescence markers
1066 1102 $ cd ..
1067 1103 $ cat >$TESTTMP/test_extension.py << EOF
1068 1104 > from mercurial import cmdutil, registrar
1069 1105 > from mercurial.i18n import _
1070 1106 >
1071 1107 > cmdtable = {}
1072 1108 > command = registrar.command(cmdtable)
1073 1109 > @command("amendtransient",[], _('hg amendtransient [rev]'))
1074 1110 > def amend(ui, repo, *pats, **opts):
1075 1111 > def commitfunc(ui, repo, message, match, opts):
1076 1112 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
1077 1113 > opts['message'] = 'Test'
1078 1114 > opts['logfile'] = None
1079 1115 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
1080 1116 > ui.write('%s\n' % repo.changelog.headrevs())
1081 1117 > EOF
1082 1118 $ cat >> $HGRCPATH << EOF
1083 1119 > [extensions]
1084 1120 > testextension=$TESTTMP/test_extension.py
1085 1121 > EOF
1086 1122 $ hg init repo-issue-nativerevs-pending-changes
1087 1123 $ cd repo-issue-nativerevs-pending-changes
1088 1124 $ mkcommit a
1089 1125 $ mkcommit b
1090 1126 $ hg up ".^"
1091 1127 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1092 1128 $ echo aa > a
1093 1129 $ hg amendtransient
1094 1130 [1, 3]
1095 1131
1096 1132 Test cache consistency for the visible filter
1097 1133 1) We want to make sure that the cached filtered revs are invalidated when
1098 1134 bookmarks change
1099 1135 $ cd ..
1100 1136 $ cat >$TESTTMP/test_extension.py << EOF
1101 1137 > import weakref
1102 1138 > from mercurial import cmdutil, extensions, bookmarks, repoview
1103 1139 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1104 1140 > reporef = weakref.ref(bkmstoreinst._repo)
1105 1141 > def trhook(tr):
1106 1142 > repo = reporef()
1107 1143 > hidden1 = repoview.computehidden(repo)
1108 1144 > hidden = repoview.filterrevs(repo, 'visible')
1109 1145 > if sorted(hidden1) != sorted(hidden):
1110 1146 > print "cache inconsistency"
1111 1147 > bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
1112 1148 > orig(bkmstoreinst, *args, **kwargs)
1113 1149 > def extsetup(ui):
1114 1150 > extensions.wrapfunction(bookmarks.bmstore, 'recordchange',
1115 1151 > _bookmarkchanged)
1116 1152 > EOF
1117 1153
1118 1154 $ hg init repo-cache-inconsistency
1119 1155 $ cd repo-issue-nativerevs-pending-changes
1120 1156 $ mkcommit a
1121 1157 a already tracked!
1122 1158 $ mkcommit b
1123 1159 $ hg id
1124 1160 13bedc178fce tip
1125 1161 $ echo "hello" > b
1126 1162 $ hg commit --amend -m "message"
1127 1163 $ hg book bookb -r 13bedc178fce --hidden
1128 1164 $ hg log -r 13bedc178fce
1129 1165 5:13bedc178fce (draft *obsolete*) [ bookb] add b
1130 1166 $ hg book -d bookb
1131 1167 $ hg log -r 13bedc178fce
1132 1168 abort: hidden revision '13bedc178fce'!
1133 1169 (use --hidden to access hidden revisions)
1134 1170 [255]
1135 1171
1136 1172 Empty out the test extension, as it isn't compatible with later parts
1137 1173 of the test.
1138 1174 $ echo > $TESTTMP/test_extension.py
1139 1175
1140 1176 Test that a changeset can still be pulled when obsolescence markers that
1141 1177 apply to it already exist locally (issue4945)
1142 1178
1143 1179 $ cd ..
1144 1180 $ hg init issue4845
1145 1181 $ cd issue4845
1146 1182
1147 1183 $ echo foo > f0
1148 1184 $ hg add f0
1149 1185 $ hg ci -m '0'
1150 1186 $ echo foo > f1
1151 1187 $ hg add f1
1152 1188 $ hg ci -m '1'
1153 1189 $ echo foo > f2
1154 1190 $ hg add f2
1155 1191 $ hg ci -m '2'
1156 1192
1157 1193 $ echo bar > f2
1158 1194 $ hg commit --amend --config experimental.evolution=createmarkers
1159 1195 $ hg log -G
1160 1196 @ 4:b0551702f918 (draft) [tip ] 2
1161 1197 |
1162 1198 o 1:e016b03fd86f (draft) [ ] 1
1163 1199 |
1164 1200 o 0:a78f55e5508c (draft) [ ] 0
1165 1201
1166 1202 $ hg log -G --hidden
1167 1203 @ 4:b0551702f918 (draft) [tip ] 2
1168 1204 |
1169 1205 | x 3:f27abbcc1f77 (draft *obsolete*) [ ] temporary amend commit for e008cf283490
1170 1206 | |
1171 1207 | x 2:e008cf283490 (draft *obsolete*) [ ] 2
1172 1208 |/
1173 1209 o 1:e016b03fd86f (draft) [ ] 1
1174 1210 |
1175 1211 o 0:a78f55e5508c (draft) [ ] 0
1176 1212
1177 1213
1178 1214 $ hg strip -r 1 --config extensions.strip=
1179 1215 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1180 1216 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob)
1181 1217 $ hg log -G
1182 1218 @ 0:a78f55e5508c (draft) [tip ] 0
1183 1219
1184 1220 $ hg log -G --hidden
1185 1221 @ 0:a78f55e5508c (draft) [tip ] 0
1186 1222
1187 1223 $ hg debugbundle .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
1188 1224 Stream params: sortdict([('Compression', 'BZ')])
1189 1225 changegroup -- "sortdict([('version', '02'), ('nbchanges', '4')])"
1190 1226 e016b03fd86fcccc54817d120b90b751aaf367d6
1191 1227 e008cf2834908e5d6b0f792a9d4b0e2272260fb8
1192 1228 f27abbcc1f77fb409cf9160482fe619541e2d605
1193 1229 b0551702f918510f01ae838ab03a463054c67b46
1194 1230 obsmarkers -- 'sortdict()'
1195 1231 version: 1 (139 bytes)
1196 1232 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1197 1233 f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1198 1234
1199 1235 $ hg pull .hg/strip-backup/*
1200 1236 pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
1201 1237 searching for changes
1202 1238 adding changesets
1203 1239 adding manifests
1204 1240 adding file changes
1205 1241 added 2 changesets with 2 changes to 2 files
1206 1242 (run 'hg update' to get a working copy)
1207 1243 $ hg log -G
1208 1244 o 2:b0551702f918 (draft) [tip ] 2
1209 1245 |
1210 1246 o 1:e016b03fd86f (draft) [ ] 1
1211 1247 |
1212 1248 @ 0:a78f55e5508c (draft) [ ] 0
1213 1249
1214 1250 $ hg log -G --hidden
1215 1251 o 2:b0551702f918 (draft) [tip ] 2
1216 1252 |
1217 1253 o 1:e016b03fd86f (draft) [ ] 1
1218 1254 |
1219 1255 @ 0:a78f55e5508c (draft) [ ] 0
1220 1256
1221 1257 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1222 1258 only a subset of those are displayed (because of --rev option)
1223 1259 $ hg init doindexrev
1224 1260 $ cd doindexrev
1225 1261 $ echo a > a
1226 1262 $ hg ci -Am a
1227 1263 adding a
1228 1264 $ hg ci --amend -m aa
1229 1265 $ echo b > b
1230 1266 $ hg ci -Am b
1231 1267 adding b
1232 1268 $ hg ci --amend -m bb
1233 1269 $ echo c > c
1234 1270 $ hg ci -Am c
1235 1271 adding c
1236 1272 $ hg ci --amend -m cc
1237 1273 $ echo d > d
1238 1274 $ hg ci -Am d
1239 1275 adding d
1240 1276 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1241 1277 $ hg debugobsolete --index --rev "3+7"
1242 1278 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
1243 1279 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
1244 1280 $ hg debugobsolete --index --rev "3+7" -Tjson
1245 1281 [
1246 1282 {
1247 1283 "date": [0.0, 0],
1248 1284 "flag": 0,
1249 1285 "index": 1,
1250 1286 "metadata": {"user": "test"},
1251 1287 "precnode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1252 1288 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1253 1289 },
1254 1290 {
1255 1291 "date": [0.0, 0],
1256 1292 "flag": 0,
1257 1293 "index": 3,
1258 1294 "metadata": {"operation": "amend", "user": "test"},
1259 1295 "precnode": "4715cf767440ed891755448016c2b8cf70760c30",
1260 1296 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1261 1297 }
1262 1298 ]
1263 1299
1264 1300 Test the --delete option of debugobsolete command
1265 1301 $ hg debugobsolete --index
1266 1302 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1267 1303 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1268 1304 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1269 1305 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
1270 1306 $ hg debugobsolete --delete 1 --delete 3
1271 1307 deleted 2 obsolescence markers
1272 1308 $ hg debugobsolete
1273 1309 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1274 1310 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1275 1311
1276 1312 Test adding changeset after obsmarkers affecting it
1277 1313 (eg: during pull, or unbundle)
1278 1314
1279 1315 $ mkcommit e
1280 1316 $ hg bundle -r . --base .~1 ../bundle-2.hg
1281 1317 1 changesets found
1282 1318 $ getid .
1283 1319 $ hg --config extensions.strip= strip -r .
1284 1320 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1285 1321 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
1286 1322 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1287 1323 $ hg unbundle ../bundle-2.hg
1288 1324 adding changesets
1289 1325 adding manifests
1290 1326 adding file changes
1291 1327 added 1 changesets with 1 changes to 1 files
1292 1328 (run 'hg update' to get a working copy)
1293 1329 $ hg log -G
1294 1330 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1295 1331 |
1296 1332 | o 6:4715cf767440 (draft) [ ] d
1297 1333 |/
1298 1334 o 5:29346082e4a9 (draft) [ ] cc
1299 1335 |
1300 1336 o 3:d27fb9b06607 (draft) [ ] bb
1301 1337 |
1302 1338 | o 2:6fdef60fcbab (draft) [ ] b
1303 1339 |/
1304 1340 o 1:f9bd49731b0b (draft) [ ] aa
1305 1341
1306 1342
1307 1343 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now