# debugcommands.py — Mercurial debug* command implementations.
#
# Changeset context (from the hosting site's diff view):
#   commit:   "debugbundle: display the content of obsmarkers parts..."
#   author:   marmoute
#   revision: r32517:b62b2b37 (default branch)
#   diff:     @@ -1,2162 +1,2185 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 policy,
51 51 pvec,
52 52 pycompat,
53 53 registrar,
54 54 repair,
55 55 revlog,
56 56 revset,
57 57 revsetlang,
58 58 scmutil,
59 59 setdiscovery,
60 60 simplemerge,
61 61 smartset,
62 62 sslutil,
63 63 streamclone,
64 64 templater,
65 65 treediscovery,
66 66 upgrade,
67 67 util,
68 68 vfs as vfsmod,
69 69 )
70 70
71 71 release = lockmod.release
72 72
73 73 command = registrar.command()
74 74
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions: an explicit revlog index file plus two
    # revisions, or two revisions resolved against the current repo's
    # changelog.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(anc), hex(anc)))
93 93
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the path (handles remote URLs too), sniff the bundle type,
    # then unpack it straight into the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
100 100
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo: commits are appended from rev 0.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first pass, for the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    # Take wlock -> lock -> transaction in the canonical order; released
    # in reverse in the finally block.
    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1                 # rev id of the last node committed
        atbranch = 'default'    # branch applied to subsequent nodes
        nodeids = []            # maps dag rev id -> committed node id
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: 3-way merge the "mf" file of both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # mark this revision's own line so every rev's change
                    # is mergeable with any other rev's change
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        # on merges, carry over the second parent's nf* files
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
252 252
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of changegroup 'gen'

    With 'all' set, every delta chunk of every group (changelog, manifest,
    filelogs) is listed with its parents/base; otherwise only changelog
    node ids are printed. 'indent' prefixes every output line (used when
    nested inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Consume delta chunks until the empty-dict sentinel; 'chain'
            # tracks the previous node because deltachunk() needs it.
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        # NOTE: the group headers must be consumed in stream order:
        # changelog, manifest, then one header per filelog.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node
290 290
def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
    """display version and markers contained in 'data'

    'data' is the raw binary payload of an obsmarkers bundle2 part.
    Unknown encoding versions are reported instead of raising. 'indent'
    prefixes every output line for nesting inside bundle2 part output.
    """
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %s (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Reuse the debugobsolete formatter so markers render identically
        # to the 'hg debugobsolete' command.
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsolete.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
311
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    # Walk every part; payload types we understand get a detailed dump.
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        parttype = part.type
        if parttype == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif parttype == 'obsmarkers':
            _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
302 325
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # --spec: report the inferred bundlespec without unpacking.
            spec = exchange.getbundlespec(ui, fh)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, fh, bundlepath)
        # bundle2 files get their own part-aware dumper.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
320 343
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of both
    parents and warns about each inconsistency; aborts if any were found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal/'r'emoved entries must exist in the first parent.
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded entries must not already exist in the first parent.
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged entries must come from at least one parent.
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # BUGFIX: the message must not be bound to a local named 'error' --
        # that shadowed the 'error' module, so 'error.Abort' then resolved
        # against the message string and raised AttributeError instead.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
348 371
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # Dispatch to the style or color listing helper.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
359 382
def _debugdisplaycolor(ui):
    """list every color/effect name, rendered in its own color"""
    # Work on a copy so the caller's ui style table is untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, also expose user-defined color.*/terminfo.*
        # config entries under their short names.
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
377 400
def _debugdisplaystyle(ui):
    """list each configured style label with its rendered effects"""
    ui.write(_('available style:\n'))
    # Pad to the longest label so effect columns line up.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            padding = max(0, width - len(label))
            ui.write(' ' * padding)
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
389 412
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # Generate the v1 stream bundle and spool its chunks to 'fname'.
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
401 424
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog index file: emit its DAG, labeling any revs
        # the user listed on the command line.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, parents)) per node, plus 'l' label events.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Precompute rev -> [tag names] for label events.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # Yield 'a' (branch annotation), 'n' (node) and 'l' (label)
            # events in changelog order.
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
464 487
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the sole positional argument is really the revision.
    if any(opts.get(k) for k in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
479 502
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended also tries the less common date formats.
    formats = util.extendeddateformats if opts["extended"] else None
    if formats is not None:
        parsed = util.parsedate(date, formats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
495 518
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # Index entry fields used below: e[1]=compressed size,
        # e[2]=uncompressed size, e[3]=delta base rev, e[5]/e[6]=parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # Classify how the delta base was chosen.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta the base is always the previous rev
            # (or the rev itself for a full snapshot).
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # NOTE(review): column padding in these header/format strings may have
    # been collapsed in transcription -- verify alignment against output.
    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains consecutively by first-seen base revision.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
596 619
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Dirstate entry tuple layout: ent[0]=state char, ent[1]=mode,
    # ent[2]=size, ent[3]=mtime (-1 when unset).
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # NOTE(review): padding after 'unset'/'set' may have been lost
            # in transcription -- verify column alignment.
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
627 650
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round and report the common-head relationship.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # Reduce the unpruned common set to its heads.
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery requests recorded in server log files
        # (semicolon-separated records; field 1 is the operation code).
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
693 716
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # Default verbosity: annotate the name with compatibility info.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        # condwrite: only emitted in verbose mode (template data always set).
        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
738 761
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # In verbose mode also dump the parsed expression tree.
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for matched in ctx.getfileset(expr):
        ui.write("%s\n" % matched)
751 774
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # Render a boolean probe result as the literal 'yes'/'no'.
        return flag and 'yes' or 'no'

    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    casesensitive = '(unknown)'
    try:
        # Probe case sensitivity with a throwaway file inside 'path'.
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
766 789
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # Map the user-facing compression name onto an on-disk bundle type.
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
800 823
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # Not matched directly: check whether a containing
                    # directory is ignored instead.
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                # ignoredata is (ignore file, line number, pattern text).
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)
841 864
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # The delta-base column is labeled 'delta' with generaldelta, 'base'
    # otherwise.  NOTE(review): column padding in the header strings may
    # have been collapsed in transcription -- verify alignment.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
897 920
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        # emit one edge per parent; the second parent only exists on merges
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
911 934
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # Write `contents` to a fresh temporary file and return its path;
        # the caller is responsible for removing the file.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # Counter of detected problems; also the command's return value.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS/SSL capabilities of the Python build, plus SNI support.
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # Try importing the C extension modules to verify they load.
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                # p is reused below as "templates OK" flag
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    # only the first token of the editor setting is looked up in PATH
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1078 1101
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # query all ids at once; the peer answers with one boolean per id
    answers = peer.known([bin(nodeid) for nodeid in ids])
    ui.write("%s\n" % "".join("1" if known else "0" for known in answers))
1091 1114
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias: all work is done by debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1096 1119
@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
     ('W', 'force-wlock', None,
      _('free the working state lock (DANGEROUS)'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: the second condition used to re-test 'force_lock', so
    # running with only --force-wlock fell through to the reporting code
    # below instead of returning immediately after freeing the lock.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Return 1 if the named lock is held, 0 otherwise, printing a
        # human-readable status line either way.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so nobody else holds it
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # ENOENT: the lock file vanished; treat as free

            ui.write(("%-6s free\n") % (name + ":"))
            return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1168 1191
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the all-zero node as the word 'null' for readability.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump one on-disk record list (version 1 or 2 format).
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: driver string plus its state
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record; fields are NUL-separated
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    # v1 records carry flags but no "other" node
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename followed by key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other, and optionally base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types sort first, in 'LOml' order; unknown types
        # sort after them, ordered by their payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # v1 is authoritative when the two formats disagree
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1267 1290
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(branch
                      for (branch, heads, tip, closed)
                      in repo.branchmap().iterbranches()
                      if not closed)
    prefixes = args if args else ['']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1287 1310
@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
     ('', 'record-parents', False,
      _('record parent information for the precursor')),
     ('r', 'rev', [], _('display markers relevant to REV')),
     ('', 'index', False, _('display index of the marker')),
     ('', 'delete', [], _('delete markers specified by indices')),
    ] + cmdutil.commitopts2 + cmdutil.formatteropts,
    _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Parse a full hex node id into its binary form; abort on anything
        # that is not exactly full length.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # --delete mode: remove markers by index and return early.
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker under lock + transaction.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # NOTE(review): "cannot used" is a typo in this
                        # user-visible message ("cannot use").
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally filtered by --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1398 1421
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec; `acceptable` is a
        # string of dirstate state characters to match against.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # specs outside the repository cannot match anything
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate stores '/'-separated paths; translate on platforms
        # where os.sep differs, and translate back before returning
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter options;
    # no filter means everything ('nmar').
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1463 1486
@command('debugpickmergetool',
    [('r', 'rev', '', _('check for files in this revision'), _('REV')),
     ('', 'changedelete', None, _('emulate merging change and delete')),
    ] + cmdutil.walkopts + cmdutil.mergetoolopts,
    _('[PATTERN]...'),
    inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # --tool is applied via a temporary config override so _picktool sees
    # the same configuration path a real merge would.
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # without --debug, swallow _picktool's chatter so only the
                # "FILE = MERGETOOL" lines appear
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1541 1564
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(key),
                                   util.escapestr(value)))
        return
    # update mode: conditional set, reporting the peer's answer
    key, old, new = keyinfo
    result = peer.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
1562 1585
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs of two revisions and print their relation
    # ('=', '>', '<' or '|') along with depth and distance metrics.
    # (Deliberately no docstring: it would become user-visible help.)
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): if none of the four comparisons above holds, 'rel' is
    # unbound and the final write raises NameError — presumably the pvec
    # relations are exhaustive; confirm against the pvec module.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1583 1606
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means "rebuild everything"
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files only in the manifest, plus dirstate-only files that are
            # not marked added, are the inconsistent ones to rebuild
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1621 1644
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All work is delegated to repair.rebuildfncache.
    repair.rebuildfncache(ui, repo)
1626 1649
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        relpath = matcher.rel(abspath)
        # renamed() yields (source path, source node) or a false value
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
1643 1666
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # Raw dump mode: emit one row per revision with index-level details.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        runningtotal = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rawsz = r.rawsize(rev)
            runningtotal += rawsz
            # A revision stops counting as a head as soon as it shows up
            # as a parent of a later revision.
            heads.difference_update(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = runningtotal / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rawsz, runningtotal, compression, len(heads), clen))
        return 0

    # Statistics mode: walk every revision once and aggregate.
    version = r.version
    fmtversion = version & 0xFFFF
    flags = []
    gdelta = False
    if version & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if version & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # Each size accumulator is [min, max, total]; min starts as None so the
    # first observed size initializes it.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one observation into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if fmtversion > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as a full snapshot, so it starts a new delta chain.
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            # Classify which revision the delta is computed against.
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = segment[0]
        else:
            chunktype = 'empty'

        chunktypecounts[chunktype] = chunktypecounts.get(chunktype, 0) + 1
        chunktypesizes[chunktype] = chunktypesizes.get(chunktype, 0) + size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Width-matched integer format string.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Width-matched "count (percentage)" format string.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % fmtversion)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label; printable types also show the character.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in string.ascii_letters:
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if fmtversion > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    # The parsing pipeline, in order; each stage transforms the tree.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(name for name, fn in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for name in opts['show_stage']:
            if name not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % name)
        showalways.update(opts['show_stage'])

    # Run the pipeline, remembering the tree after every stage and printing
    # the requested ones (changed-only stages are skipped when identical).
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for name, fn in stages:
        treebystage[name] = tree = fn(tree)
        if name in showalways or (name in showchanged and tree != printedtree):
            if opts['show_stage'] or name != 'parsed':
                ui.write(("* %s:\n") % name)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the results.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        opcodes = difflib.SequenceMatcher(None, arevs, brevs).get_opcodes()
        for tag, alo, ahi, blo, bhi in opcodes:
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions up front; a missing second parent becomes null.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the substate (path -> (source, revision, kind)) of a changeset.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])

@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # In --debug mode, show full 40-char hashes instead of short forms.
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            # An empty successors set (pruned changeset) prints a blank line.
            ui.write('\n')

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev needs an actual repository to resolve against.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE keyword definitions; 'ui' is reserved.
    props = {}
    for spec in opts['define']:
        try:
            key, val = (part.strip() for part in spec.split('=', 1))
            if not key or key == 'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % spec)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when different.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    mapfile = None
    if revs is None:
        # Generic template: render once with only the -D props available.
        topic = 'debugtemplate'
        t = formatter.maketemplater(ui, topic, tmpl)
        ui.write(templater.stringify(t(topic, ui=ui, **props)))
    else:
        # Log template: render once per requested changeset.
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock before rebuilding.
    with repo.wlock(), repo.lock():
        repo.updatecaches()

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All of the real work lives in the upgrade module; this is a thin shim.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % m))
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize path separators for display (ui.slash on Windows).
    normalize = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        normalize = lambda fn: util.normpath(fn)
    # Column widths sized to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(abs) for abs in items),
        max(len(m.rel(abs)) for abs in items))
    for abs in items:
        line = fmt % (abs, normalize(m.rel(abs)),
                      m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Connect as a peer so the arguments travel over the wire protocol.
    repo = hg.peer(ui, opts, repopath)
    # Drop the generic remote options; only the test options are forwarded.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for key, value in opts.iteritems():
        if value:
            args[key] = value
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

@@ -1,169 +1,171
1 1 Test changesets filtering during exchanges (some tests are still in
2 2 test-obsolete.t)
3 3
4 4 $ cat >> $HGRCPATH << EOF
5 5 > [experimental]
6 6 > evolution=createmarkers
7 7 > EOF
8 8
9 9 Push does not corrupt remote
10 10 ----------------------------
11 11
12 12 Create a DAG where a changeset reuses a revision from a file first used in an
13 13 extinct changeset.
14 14
15 15 $ hg init local
16 16 $ cd local
17 17 $ echo 'base' > base
18 18 $ hg commit -Am base
19 19 adding base
20 20 $ echo 'A' > A
21 21 $ hg commit -Am A
22 22 adding A
23 23 $ hg up 0
24 24 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
25 25 $ hg revert -ar 1
26 26 adding A
27 27 $ hg commit -Am "A'"
28 28 created new head
29 29 $ hg log -G --template='{desc} {node}'
30 30 @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
31 31 |
32 32 | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a
33 33 |/
34 34 o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
35 35
36 36 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
37 37
38 38 Push it. The bundle should not refer to the extinct changeset.
39 39
40 40 $ hg init ../other
41 41 $ hg push ../other
42 42 pushing to ../other
43 43 searching for changes
44 44 adding changesets
45 45 adding manifests
46 46 adding file changes
47 47 added 2 changesets with 2 changes to 2 files
48 48 $ hg -R ../other verify
49 49 checking changesets
50 50 checking manifests
51 51 crosschecking files in changesets and manifests
52 52 checking files
53 53 2 files, 2 changesets, 2 total revisions
54 54
55 55 Adding a changeset going extinct locally
56 56 ------------------------------------------
57 57
58 58 Pull a changeset that will immediately go extinct (because you already have a
59 59 marker to obsolete it)
60 60 (test resolution of issue3788)
61 61
62 62 $ hg phase --draft --force f89bcc95eba5
63 63 $ hg phase -R ../other --draft --force f89bcc95eba5
64 64 $ hg commit --amend -m "A''"
65 65 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
66 66 $ hg pull ../other
67 67 pulling from ../other
68 68 searching for changes
69 69 adding changesets
70 70 adding manifests
71 71 adding file changes
72 72 added 1 changesets with 0 changes to 1 files (+1 heads)
73 73 (run 'hg heads' to see heads, 'hg merge' to merge)
74 74
75 75 check that bundle is not affected
76 76
77 77 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg
78 78 1 changesets found
79 79 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
80 80 $ hg unbundle ../f89bcc95eba5.hg
81 81 adding changesets
82 82 adding manifests
83 83 adding file changes
84 84 added 1 changesets with 0 changes to 1 files (+1 heads)
85 85 (run 'hg heads' to see heads)
86 86
87 87 check that bundle can contain markers:
88 88
89 89 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.evolution.bundle-obsmarker=1
90 90 1 changesets found
91 91 $ hg debugbundle ../f89bcc95eba5.hg
92 92 Stream params: sortdict([('Compression', 'BZ')])
93 93 changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
94 94 f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
95 95 $ hg debugbundle ../f89bcc95eba5-obs.hg
96 96 Stream params: sortdict([('Compression', 'BZ')])
97 97 changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
98 98 f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
99 99 obsmarkers -- 'sortdict()'
100 version: 1 (70 bytes)
101 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
100 102
101 103 $ cd ..
102 104
103 105 pull does not fetch excessive changesets when common node is hidden (issue4982)
104 106 -------------------------------------------------------------------------------
105 107
106 108 initial repo with server and client matching
107 109
108 110 $ hg init pull-hidden-common
109 111 $ cd pull-hidden-common
110 112 $ touch foo
111 113 $ hg -q commit -A -m initial
112 114 $ echo 1 > foo
113 115 $ hg commit -m 1
114 116 $ echo 2a > foo
115 117 $ hg commit -m 2a
116 118 $ cd ..
117 119 $ hg clone --pull pull-hidden-common pull-hidden-common-client
118 120 requesting all changes
119 121 adding changesets
120 122 adding manifests
121 123 adding file changes
122 124 added 3 changesets with 3 changes to 1 files
123 125 updating to branch default
124 126 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
125 127
126 128 server obsoletes the old head
127 129
128 130 $ cd pull-hidden-common
129 131 $ hg -q up -r 1
130 132 $ echo 2b > foo
131 133 $ hg -q commit -m 2b
132 134 $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
133 135 $ cd ..
134 136
135 137 client only pulls down 1 changeset
136 138
137 139 $ cd pull-hidden-common-client
138 140 $ hg pull --debug
139 141 pulling from $TESTTMP/pull-hidden-common (glob)
140 142 query 1; heads
141 143 searching for changes
142 144 taking quick initial sample
143 145 query 2; still undecided: 2, sample size is: 2
144 146 2 total queries
145 147 1 changesets found
146 148 list of changesets:
147 149 bec0734cd68e84477ba7fc1d13e6cff53ab70129
148 150 listing keys for "phases"
149 151 listing keys for "bookmarks"
150 152 bundle2-output-bundle: "HG20", 3 parts total
151 153 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
152 154 bundle2-output-part: "listkeys" (params: 1 mandatory) 58 bytes payload
153 155 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
154 156 bundle2-input-bundle: with-transaction
155 157 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
156 158 adding changesets
157 159 add changeset bec0734cd68e
158 160 adding manifests
159 161 adding file changes
160 162 adding foo revisions
161 163 added 1 changesets with 1 changes to 1 files (+1 heads)
162 164 bundle2-input-part: total payload size 476
163 165 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
164 166 bundle2-input-part: total payload size 58
165 167 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
166 168 bundle2-input-bundle: 2 parts total
167 169 checking for updated bookmarks
168 170 updating the branch cache
169 171 (run 'hg heads' to see heads, 'hg merge' to merge)
General Comments 0
You need to be logged in to leave comments. Login now