debugrevlog: align chain length, reach, and compression ratio...
Yuya Nishihara
r33062:e21b750c default
@@ -1,2226 +1,2226 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 phases,
51 51 policy,
52 52 pvec,
53 53 pycompat,
54 54 registrar,
55 55 repair,
56 56 revlog,
57 57 revset,
58 58 revsetlang,
59 59 scmutil,
60 60 setdiscovery,
61 61 simplemerge,
62 62 smartset,
63 63 sslutil,
64 64 streamclone,
65 65 templater,
66 66 treediscovery,
67 67 upgrade,
68 68 util,
69 69 vfs as vfsmod,
70 70 )
71 71
72 72 release = lockmod.release
73 73
74 74 command = registrar.command()
75 75
76 76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 77 def debugancestor(ui, repo, *args):
78 78 """find the ancestor revision of two revisions in a given index"""
79 79 if len(args) == 3:
80 80 index, rev1, rev2 = args
81 81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 82 lookup = r.lookup
83 83 elif len(args) == 2:
84 84 if not repo:
85 85 raise error.Abort(_('there is no Mercurial repository here '
86 86 '(.hg not found)'))
87 87 rev1, rev2 = args
88 88 r = repo.changelog
89 89 lookup = repo.lookup
90 90 else:
91 91 raise error.Abort(_('either two or three arguments required'))
92 92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94 94
95 95 @command('debugapplystreamclonebundle', [], 'FILE')
96 96 def debugapplystreamclonebundle(ui, repo, fname):
97 97 """apply a stream clone bundle file"""
98 98 f = hg.openpath(ui, fname)
99 99 gen = exchange.readbundle(ui, f, fname)
100 100 gen.apply(repo)
101 101
102 102 @command('debugbuilddag',
103 103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 105 ('n', 'new-file', None, _('add new file at each rev'))],
106 106 _('[OPTION]... [TEXT]'))
107 107 def debugbuilddag(ui, repo, text=None,
108 108 mergeable_file=False,
109 109 overwritten_file=False,
110 110 new_file=False):
111 111 """builds a repo with a given DAG from scratch in the current empty repo
112 112
113 113 The description of the DAG is read from stdin if not given on the
114 114 command line.
115 115
116 116 Elements:
117 117
118 118 - "+n" is a linear run of n nodes based on the current default parent
119 119 - "." is a single node based on the current default parent
120 120 - "$" resets the default parent to null (implied at the start);
121 121 otherwise the default parent is always the last node created
122 122 - "<p" sets the default parent to the backref p
123 123 - "*p" is a fork at parent p, which is a backref
124 124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 125 - "/p2" is a merge of the preceding node and p2
126 126 - ":tag" defines a local tag for the preceding node
127 127 - "@branch" sets the named branch for subsequent nodes
128 128 - "#...\\n" is a comment up to the end of the line
129 129
130 130 Whitespace between the above elements is ignored.
131 131
132 132 A backref is either
133 133
134 134 - a number n, which references the node curr-n, where curr is the current
135 135 node, or
136 136 - the name of a local tag you placed earlier using ":tag", or
137 137 - empty to denote the default parent.
138 138
139 139     All string-valued elements are either strictly alphanumeric, or must
140 140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 141 """
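    # For example (illustrative only), the text "+3:mytag $ +2 /mytag" builds
    # three linear commits, tags the third as "mytag", starts a new two-commit
    # root, and finally merges that head back with "mytag".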
142 142
143 143 if text is None:
144 144 ui.status(_("reading DAG from stdin\n"))
145 145 text = ui.fin.read()
146 146
147 147 cl = repo.changelog
148 148 if len(cl) > 0:
149 149 raise error.Abort(_('repository is not empty'))
150 150
151 151 # determine number of revs in DAG
152 152 total = 0
153 153 for type, data in dagparser.parsedag(text):
154 154 if type == 'n':
155 155 total += 1
156 156
157 157 if mergeable_file:
158 158 linesperrev = 2
159 159 # make a file with k lines per rev
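        # each rev only appends " r<id>" to the line it owns (see the merge
        # loop below), so Merge3Text can recombine the file across merges
        # without conflicts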
160 160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 161 initialmergedlines.append("")
162 162
163 163 tags = []
164 164
165 165 wlock = lock = tr = None
166 166 try:
167 167 wlock = repo.wlock()
168 168 lock = repo.lock()
169 169 tr = repo.transaction("builddag")
170 170
171 171 at = -1
172 172 atbranch = 'default'
173 173 nodeids = []
174 174 id = 0
175 175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 176 for type, data in dagparser.parsedag(text):
177 177 if type == 'n':
178 178 ui.note(('node %s\n' % str(data)))
179 179 id, ps = data
180 180
181 181 files = []
182 182 fctxs = {}
183 183
184 184 p2 = None
185 185 if mergeable_file:
186 186 fn = "mf"
187 187 p1 = repo[ps[0]]
188 188 if len(ps) > 1:
189 189 p2 = repo[ps[1]]
190 190 pa = p1.ancestor(p2)
191 191 base, local, other = [x[fn].data() for x in (pa, p1,
192 192 p2)]
193 193 m3 = simplemerge.Merge3Text(base, local, other)
194 194 ml = [l.strip() for l in m3.merge_lines()]
195 195 ml.append("")
196 196 elif at > 0:
197 197 ml = p1[fn].data().split("\n")
198 198 else:
199 199 ml = initialmergedlines
200 200 ml[id * linesperrev] += " r%i" % id
201 201 mergedtext = "\n".join(ml)
202 202 files.append(fn)
203 203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204 204
205 205 if overwritten_file:
206 206 fn = "of"
207 207 files.append(fn)
208 208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209 209
210 210 if new_file:
211 211 fn = "nf%i" % id
212 212 files.append(fn)
213 213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 214 if len(ps) > 1:
215 215 if not p2:
216 216 p2 = repo[ps[1]]
217 217 for fn in p2:
218 218 if fn.startswith("nf"):
219 219 files.append(fn)
220 220 fctxs[fn] = p2[fn]
221 221
222 222 def fctxfn(repo, cx, path):
223 223 return fctxs.get(path)
224 224
225 225 if len(ps) == 0 or ps[0] < 0:
226 226 pars = [None, None]
227 227 elif len(ps) == 1:
228 228 pars = [nodeids[ps[0]], None]
229 229 else:
230 230 pars = [nodeids[p] for p in ps]
231 231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 232 date=(id, 0),
233 233 user="debugbuilddag",
234 234 extra={'branch': atbranch})
235 235 nodeid = repo.commitctx(cx)
236 236 nodeids.append(nodeid)
237 237 at = id
238 238 elif type == 'l':
239 239 id, name = data
240 240 ui.note(('tag %s\n' % name))
241 241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 242 elif type == 'a':
243 243 ui.note(('branch %s\n' % data))
244 244 atbranch = data
245 245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 246 tr.close()
247 247
248 248 if tags:
249 249 repo.vfs.write("localtags", "".join(tags))
250 250 finally:
251 251 ui.progress(_('building'), None)
252 252 release(tr, lock, wlock)
253 253
254 254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 255 indent_string = ' ' * indent
256 256 if all:
257 257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 258 % indent_string)
259 259
260 260 def showchunks(named):
261 261 ui.write("\n%s%s\n" % (indent_string, named))
262 262 chain = None
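            # deltachunk() returns {} once the group is exhausted, which is
            # the sentinel that stops iter(); 'chain' tracks the previously
            # read node, the default delta base for the next chunk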
263 263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 264 node = chunkdata['node']
265 265 p1 = chunkdata['p1']
266 266 p2 = chunkdata['p2']
267 267 cs = chunkdata['cs']
268 268 deltabase = chunkdata['deltabase']
269 269 delta = chunkdata['delta']
270 270 ui.write("%s%s %s %s %s %s %s\n" %
271 271 (indent_string, hex(node), hex(p1), hex(p2),
272 272 hex(cs), hex(deltabase), len(delta)))
273 273 chain = node
274 274
275 275 chunkdata = gen.changelogheader()
276 276 showchunks("changelog")
277 277 chunkdata = gen.manifestheader()
278 278 showchunks("manifest")
279 279 for chunkdata in iter(gen.filelogheader, {}):
280 280 fname = chunkdata['filename']
281 281 showchunks(fname)
282 282 else:
283 283 if isinstance(gen, bundle2.unbundle20):
284 284 raise error.Abort(_('use debugbundle2 for this file'))
285 285 chunkdata = gen.changelogheader()
286 286 chain = None
287 287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 288 node = chunkdata['node']
289 289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 290 chain = node
291 291
292 292 def _debugobsmarkers(ui, part, indent=0, **opts):
293 293 """display version and markers contained in 'data'"""
294 294 data = part.read()
295 295 indent_string = ' ' * indent
296 296 try:
297 297 version, markers = obsolete._readmarkers(data)
298 298 except error.UnknownVersion as exc:
299 299 msg = "%sunsupported version: %s (%d bytes)\n"
300 300 msg %= indent_string, exc.version, len(data)
301 301 ui.write(msg)
302 302 else:
303 303 msg = "%sversion: %s (%d bytes)\n"
304 304 msg %= indent_string, version, len(data)
305 305 ui.write(msg)
306 306 fm = ui.formatter('debugobsolete', opts)
307 307 for rawmarker in sorted(markers):
308 308 m = obsolete.marker(None, rawmarker)
309 309 fm.startitem()
310 310 fm.plain(indent_string)
311 311 cmdutil.showmarker(fm, m)
312 312 fm.end()
313 313
314 314 def _debugphaseheads(ui, data, indent=0):
315 315     """display phase heads contained in 'data'"""
316 316 indent_string = ' ' * indent
317 317 headsbyphase = bundle2._readphaseheads(data)
318 318 for phase in phases.allphases:
319 319 for head in headsbyphase[phase]:
320 320 ui.write(indent_string)
321 321 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
322 322
323 323 def _debugbundle2(ui, gen, all=None, **opts):
324 324 """lists the contents of a bundle2"""
325 325 if not isinstance(gen, bundle2.unbundle20):
326 326 raise error.Abort(_('not a bundle2 file'))
327 327 ui.write(('Stream params: %s\n' % repr(gen.params)))
328 328 parttypes = opts.get('part_type', [])
329 329 for part in gen.iterparts():
330 330 if parttypes and part.type not in parttypes:
331 331 continue
332 332 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
333 333 if part.type == 'changegroup':
334 334 version = part.params.get('version', '01')
335 335 cg = changegroup.getunbundler(version, part, 'UN')
336 336 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
337 337 if part.type == 'obsmarkers':
338 338 _debugobsmarkers(ui, part, indent=4, **opts)
339 339 if part.type == 'phase-heads':
340 340 _debugphaseheads(ui, part, indent=4)
341 341
342 342 @command('debugbundle',
343 343 [('a', 'all', None, _('show all details')),
344 344 ('', 'part-type', [], _('show only the named part type')),
345 345 ('', 'spec', None, _('print the bundlespec of the bundle'))],
346 346 _('FILE'),
347 347 norepo=True)
348 348 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
349 349 """lists the contents of a bundle"""
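    # e.g. "hg debugbundle --all FILE" additionally dumps every delta chunk
    # (node, parents, cset, delta base, delta length) for each revlog group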
350 350 with hg.openpath(ui, bundlepath) as f:
351 351 if spec:
352 352 spec = exchange.getbundlespec(ui, f)
353 353 ui.write('%s\n' % spec)
354 354 return
355 355
356 356 gen = exchange.readbundle(ui, f, bundlepath)
357 357 if isinstance(gen, bundle2.unbundle20):
358 358 return _debugbundle2(ui, gen, all=all, **opts)
359 359 _debugchangegroup(ui, gen, all=all, **opts)
360 360
361 361 @command('debugcheckstate', [], '')
362 362 def debugcheckstate(ui, repo):
363 363 """validate the correctness of the current dirstate"""
364 364 parent1, parent2 = repo.dirstate.parents()
365 365 m1 = repo[parent1].manifest()
366 366 m2 = repo[parent2].manifest()
367 367 errors = 0
368 368 for f in repo.dirstate:
369 369 state = repo.dirstate[f]
370 370 if state in "nr" and f not in m1:
371 371 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
372 372 errors += 1
373 373 if state in "a" and f in m1:
374 374 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
375 375 errors += 1
376 376 if state in "m" and f not in m1 and f not in m2:
377 377 ui.warn(_("%s in state %s, but not in either manifest\n") %
378 378 (f, state))
379 379 errors += 1
380 380 for f in m1:
381 381 state = repo.dirstate[f]
382 382 if state not in "nrm":
383 383 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
384 384 errors += 1
385 385 if errors:
386 386         errstr = _(".hg/dirstate inconsistent with current parent's manifest")
387 387         raise error.Abort(errstr)
388 388
389 389 @command('debugcolor',
390 390 [('', 'style', None, _('show all configured styles'))],
391 391 'hg debugcolor')
392 392 def debugcolor(ui, repo, **opts):
393 393     """show available colors, effects or styles"""
394 394 ui.write(('color mode: %s\n') % ui._colormode)
395 395 if opts.get('style'):
396 396 return _debugdisplaystyle(ui)
397 397 else:
398 398 return _debugdisplaycolor(ui)
399 399
400 400 def _debugdisplaycolor(ui):
401 401 ui = ui.copy()
402 402 ui._styles.clear()
403 403 for effect in color._activeeffects(ui).keys():
404 404 ui._styles[effect] = effect
405 405 if ui._terminfoparams:
406 406 for k, v in ui.configitems('color'):
407 407 if k.startswith('color.'):
408 408 ui._styles[k] = k[6:]
409 409 elif k.startswith('terminfo.'):
410 410 ui._styles[k] = k[9:]
411 411 ui.write(_('available colors:\n'))
412 412     # sort labels containing '_' after the others to group '_background' entries.
413 413 items = sorted(ui._styles.items(),
414 414 key=lambda i: ('_' in i[0], i[0], i[1]))
415 415 for colorname, label in items:
416 416 ui.write(('%s\n') % colorname, label=label)
417 417
418 418 def _debugdisplaystyle(ui):
419 419 ui.write(_('available style:\n'))
420 420 width = max(len(s) for s in ui._styles)
421 421 for label, effects in sorted(ui._styles.items()):
422 422 ui.write('%s' % label, label=label)
423 423 if effects:
424 424             # pad so that the effect lists line up across labels
425 425 ui.write(': ')
426 426 ui.write(' ' * (max(0, width - len(label))))
427 427 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
428 428 ui.write('\n')
429 429
430 430 @command('debugcreatestreamclonebundle', [], 'FILE')
431 431 def debugcreatestreamclonebundle(ui, repo, fname):
432 432 """create a stream clone bundle file
433 433
434 434 Stream bundles are special bundles that are essentially archives of
435 435 revlog files. They are commonly used for cloning very quickly.
436 436 """
437 437 # TODO we may want to turn this into an abort when this functionality
438 438 # is moved into `hg bundle`.
439 439 if phases.hassecret(repo):
440 440 ui.warn(_('(warning: stream clone bundle will contain secret '
441 441 'revisions)\n'))
442 442
443 443 requirements, gen = streamclone.generatebundlev1(repo)
444 444 changegroup.writechunks(ui, gen, fname)
445 445
446 446 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
447 447
448 448 @command('debugdag',
449 449 [('t', 'tags', None, _('use tags as labels')),
450 450 ('b', 'branches', None, _('annotate with branch names')),
451 451 ('', 'dots', None, _('use dots for runs')),
452 452 ('s', 'spaces', None, _('separate elements by spaces'))],
453 453 _('[OPTION]... [FILE [REV]...]'),
454 454 optionalrepo=True)
455 455 def debugdag(ui, repo, file_=None, *revs, **opts):
456 456 """format the changelog or an index DAG as a concise textual description
457 457
458 458 If you pass a revlog index, the revlog's DAG is emitted. If you list
459 459 revision numbers, they get labeled in the output as rN.
460 460
461 461 Otherwise, the changelog DAG of the current repo is emitted.
462 462 """
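    # events() below yields ('n', (rev, [parent revs])) for nodes,
    # ('l', (rev, label)) for labels and ('a', branch) for branch annotations;
    # dagparser.dagtextlines() turns that stream into the textual DAG language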
463 463 spaces = opts.get('spaces')
464 464 dots = opts.get('dots')
465 465 if file_:
466 466 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
467 467 file_)
468 468 revs = set((int(r) for r in revs))
469 469 def events():
470 470 for r in rlog:
471 471 yield 'n', (r, list(p for p in rlog.parentrevs(r)
472 472 if p != -1))
473 473 if r in revs:
474 474 yield 'l', (r, "r%i" % r)
475 475 elif repo:
476 476 cl = repo.changelog
477 477 tags = opts.get('tags')
478 478 branches = opts.get('branches')
479 479 if tags:
480 480 labels = {}
481 481 for l, n in repo.tags().items():
482 482 labels.setdefault(cl.rev(n), []).append(l)
483 483 def events():
484 484 b = "default"
485 485 for r in cl:
486 486 if branches:
487 487 newb = cl.read(cl.node(r))[5]['branch']
488 488 if newb != b:
489 489 yield 'a', newb
490 490 b = newb
491 491 yield 'n', (r, list(p for p in cl.parentrevs(r)
492 492 if p != -1))
493 493 if tags:
494 494 ls = labels.get(r)
495 495 if ls:
496 496 for l in ls:
497 497 yield 'l', (r, l)
498 498 else:
499 499 raise error.Abort(_('need repo for changelog dag'))
500 500
501 501 for line in dagparser.dagtextlines(events(),
502 502 addspaces=spaces,
503 503 wraplabels=True,
504 504 wrapannotations=True,
505 505 wrapnonlinear=dots,
506 506 usedots=dots,
507 507 maxlinewidth=70):
508 508 ui.write(line)
509 509 ui.write("\n")
510 510
511 511 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
512 512 def debugdata(ui, repo, file_, rev=None, **opts):
513 513 """dump the contents of a data file revision"""
514 514 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
515 515 if rev is not None:
516 516 raise error.CommandError('debugdata', _('invalid arguments'))
517 517 file_, rev = None, file_
518 518 elif rev is None:
519 519 raise error.CommandError('debugdata', _('invalid arguments'))
520 520 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
521 521 try:
522 522 ui.write(r.revision(r.lookup(rev), raw=True))
523 523 except KeyError:
524 524 raise error.Abort(_('invalid revision identifier %s') % rev)
525 525
526 526 @command('debugdate',
527 527 [('e', 'extended', None, _('try extended date formats'))],
528 528 _('[-e] DATE [RANGE]'),
529 529 norepo=True, optionalrepo=True)
530 530 def debugdate(ui, date, range=None, **opts):
531 531 """parse and display a date"""
532 532 if opts["extended"]:
533 533 d = util.parsedate(date, util.extendeddateformats)
534 534 else:
535 535 d = util.parsedate(date)
536 536 ui.write(("internal: %s %s\n") % d)
537 537 ui.write(("standard: %s\n") % util.datestr(d))
538 538 if range:
539 539 m = util.matchdate(range)
540 540 ui.write(("match: %s\n") % m(d[0]))
541 541
542 542 @command('debugdeltachain',
543 543 cmdutil.debugrevlogopts + cmdutil.formatteropts,
544 544 _('-c|-m|FILE'),
545 545 optionalrepo=True)
546 546 def debugdeltachain(ui, repo, file_=None, **opts):
547 547 """dump information about delta chains in a revlog
548 548
549 549 Output can be templatized. Available template keywords are:
550 550
551 551 :``rev``: revision number
552 552 :``chainid``: delta chain identifier (numbered by unique base)
553 553 :``chainlen``: delta chain length to this revision
554 554 :``prevrev``: previous revision in delta chain
555 555 :``deltatype``: role of delta / how it was computed
556 556 :``compsize``: compressed size of revision
557 557 :``uncompsize``: uncompressed size of revision
558 558 :``chainsize``: total size of compressed revisions in chain
559 559 :``chainratio``: total chain size divided by uncompressed revision size
560 560 (new delta chains typically start at ratio 2.00)
561 561 :``lindist``: linear distance from base revision in delta chain to end
562 562 of this revision
563 563 :``extradist``: total size of revisions not part of this delta chain from
564 564 base of delta chain to end of this revision; a measurement
565 565 of how much extra data we need to read/seek across to read
566 566 the delta chain for this revision
567 567 :``extraratio``: extradist divided by chainsize; another representation of
568 568 how much unrelated data is needed to load this delta chain
569 569 """
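    # e.g. "hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'" renders
    # the keywords above through the formatter/template machinery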
570 570 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
571 571 index = r.index
572 572 generaldelta = r.version & revlog.FLAG_GENERALDELTA
573 573
574 574 def revinfo(rev):
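        # a revlog index entry is the tuple (offset/flags, compressed size,
        # uncompressed size, delta base rev, link rev, p1 rev, p2 rev, node);
        # e[3], the delta base, is compared against the parents and the
        # previous rev to classify the delta type below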
575 575 e = index[rev]
576 576 compsize = e[1]
577 577 uncompsize = e[2]
578 578 chainsize = 0
579 579
580 580 if generaldelta:
581 581 if e[3] == e[5]:
582 582 deltatype = 'p1'
583 583 elif e[3] == e[6]:
584 584 deltatype = 'p2'
585 585 elif e[3] == rev - 1:
586 586 deltatype = 'prev'
587 587 elif e[3] == rev:
588 588 deltatype = 'base'
589 589 else:
590 590 deltatype = 'other'
591 591 else:
592 592 if e[3] == rev:
593 593 deltatype = 'base'
594 594 else:
595 595 deltatype = 'prev'
596 596
597 597 chain = r._deltachain(rev)[0]
598 598 for iterrev in chain:
599 599 e = index[iterrev]
600 600 chainsize += e[1]
601 601
602 602 return compsize, uncompsize, deltatype, chain, chainsize
603 603
604 604 fm = ui.formatter('debugdeltachain', opts)
605 605
606 606 fm.plain(' rev chain# chainlen prev delta '
607 607 'size rawsize chainsize ratio lindist extradist '
608 608 'extraratio\n')
609 609
610 610 chainbases = {}
611 611 for rev in r:
612 612 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
613 613 chainbase = chain[0]
614 614 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
615 615 basestart = r.start(chainbase)
616 616 revstart = r.start(rev)
617 617 lineardist = revstart + comp - basestart
618 618 extradist = lineardist - chainsize
619 619 try:
620 620 prevrev = chain[-2]
621 621 except IndexError:
622 622 prevrev = -1
623 623
624 624 chainratio = float(chainsize) / float(uncomp)
625 625 extraratio = float(extradist) / float(chainsize)
626 626
627 627 fm.startitem()
628 628 fm.write('rev chainid chainlen prevrev deltatype compsize '
629 629 'uncompsize chainsize chainratio lindist extradist '
630 630 'extraratio',
631 631 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
632 632 rev, chainid, len(chain), prevrev, deltatype, comp,
633 633 uncomp, chainsize, chainratio, lineardist, extradist,
634 634 extraratio,
635 635 rev=rev, chainid=chainid, chainlen=len(chain),
636 636 prevrev=prevrev, deltatype=deltatype, compsize=comp,
637 637 uncompsize=uncomp, chainsize=chainsize,
638 638 chainratio=chainratio, lindist=lineardist,
639 639 extradist=extradist, extraratio=extraratio)
640 640
641 641 fm.end()
642 642
643 643 @command('debugdirstate|debugstate',
644 644 [('', 'nodates', None, _('do not display the saved mtime')),
645 645 ('', 'datesort', None, _('sort by saved mtime'))],
646 646 _('[OPTION]...'))
647 647 def debugstate(ui, repo, **opts):
648 648 """show the contents of the current dirstate"""
649 649
650 650 nodates = opts.get('nodates')
651 651 datesort = opts.get('datesort')
652 652
653 653 timestr = ""
654 654 if datesort:
655 655 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
656 656 else:
657 657 keyfunc = None # sort by filename
658 658 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
659 659 if ent[3] == -1:
660 660 timestr = 'unset '
661 661 elif nodates:
662 662 timestr = 'set '
663 663 else:
664 664 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
665 665 time.localtime(ent[3]))
666 666 if ent[1] & 0o20000:
667 667 mode = 'lnk'
668 668 else:
669 669 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
670 670 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
671 671 for f in repo.dirstate.copies():
672 672 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
673 673
674 674 @command('debugdiscovery',
675 675 [('', 'old', None, _('use old-style discovery')),
676 676 ('', 'nonheads', None,
677 677 _('use old-style discovery with non-heads included')),
678 678 ] + cmdutil.remoteopts,
679 679 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
680 680 def debugdiscovery(ui, repo, remoteurl="default", **opts):
681 681 """runs the changeset discovery protocol in isolation"""
682 682 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
683 683 opts.get('branch'))
684 684 remote = hg.peer(repo, opts, remoteurl)
685 685 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
686 686
687 687 # make sure tests are repeatable
688 688 random.seed(12323)
689 689
690 690 def doit(localheads, remoteheads, remote=remote):
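        # --old exercises the legacy treediscovery protocol (findcommonincoming
        # over branches); the default path uses setdiscovery's random-sampling
        # findcommonheads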
691 691 if opts.get('old'):
692 692 if localheads:
693 693 raise error.Abort('cannot use localheads with old style '
694 694 'discovery')
695 695 if not util.safehasattr(remote, 'branches'):
696 696 # enable in-client legacy support
697 697 remote = localrepo.locallegacypeer(remote.local())
698 698 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
699 699 force=True)
700 700 common = set(common)
701 701 if not opts.get('nonheads'):
702 702 ui.write(("unpruned common: %s\n") %
703 703 " ".join(sorted(short(n) for n in common)))
704 704 dag = dagutil.revlogdag(repo.changelog)
705 705 all = dag.ancestorset(dag.internalizeall(common))
706 706 common = dag.externalizeall(dag.headsetofconnecteds(all))
707 707 else:
708 708 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
709 709 common = set(common)
710 710 rheads = set(hds)
711 711 lheads = set(repo.heads())
712 712 ui.write(("common heads: %s\n") %
713 713 " ".join(sorted(short(n) for n in common)))
714 714 if lheads <= common:
715 715 ui.write(("local is subset\n"))
716 716 elif rheads <= common:
717 717 ui.write(("remote is subset\n"))
718 718
719 719 serverlogs = opts.get('serverlog')
720 720 if serverlogs:
721 721 for filename in serverlogs:
722 722 with open(filename, 'r') as logfile:
723 723 line = logfile.readline()
724 724 while line:
725 725 parts = line.strip().split(';')
726 726 op = parts[1]
727 727 if op == 'cg':
728 728 pass
729 729 elif op == 'cgss':
730 730 doit(parts[2].split(' '), parts[3].split(' '))
731 731 elif op == 'unb':
732 732 doit(parts[3].split(' '), parts[2].split(' '))
733 733 line = logfile.readline()
734 734 else:
735 735 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
736 736 opts.get('remote_head'))
737 737 localrevs = opts.get('local_head')
738 738 doit(localrevs, remoterevs)
739 739
740 740 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
741 741 def debugextensions(ui, **opts):
742 742 '''show information about active extensions'''
743 743 exts = extensions.extensions(ui)
744 744 hgver = util.version()
745 745 fm = ui.formatter('debugextensions', opts)
746 746 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
747 747 isinternal = extensions.ismoduleinternal(extmod)
748 748 extsource = pycompat.fsencode(extmod.__file__)
749 749 if isinternal:
750 750 exttestedwith = [] # never expose magic string to users
751 751 else:
752 752 exttestedwith = getattr(extmod, 'testedwith', '').split()
753 753 extbuglink = getattr(extmod, 'buglink', None)
754 754
755 755 fm.startitem()
756 756
757 757 if ui.quiet or ui.verbose:
758 758 fm.write('name', '%s\n', extname)
759 759 else:
760 760 fm.write('name', '%s', extname)
761 761 if isinternal or hgver in exttestedwith:
762 762 fm.plain('\n')
763 763 elif not exttestedwith:
764 764 fm.plain(_(' (untested!)\n'))
765 765 else:
766 766 lasttestedversion = exttestedwith[-1]
767 767 fm.plain(' (%s!)\n' % lasttestedversion)
768 768
769 769 fm.condwrite(ui.verbose and extsource, 'source',
770 770 _(' location: %s\n'), extsource or "")
771 771
772 772 if ui.verbose:
773 773 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
774 774 fm.data(bundled=isinternal)
775 775
776 776 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
777 777 _(' tested with: %s\n'),
778 778 fm.formatlist(exttestedwith, name='ver'))
779 779
780 780 fm.condwrite(ui.verbose and extbuglink, 'buglink',
781 781 _(' bug reporting: %s\n'), extbuglink or "")
782 782
783 783 fm.end()
784 784
785 785 @command('debugfileset',
786 786 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
787 787 _('[-r REV] FILESPEC'))
788 788 def debugfileset(ui, repo, expr, **opts):
789 789 '''parse and apply a fileset specification'''
790 790 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
791 791 if ui.verbose:
792 792 tree = fileset.parse(expr)
793 793 ui.note(fileset.prettyformat(tree), "\n")
794 794
795 795 for f in ctx.getfileset(expr):
796 796 ui.write("%s\n" % f)
797 797
798 798 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
799 799 def debugfsinfo(ui, path="."):
800 800 """show information detected about current filesystem"""
801 801 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
802 802 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
803 803 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
804 804 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
805 805 casesensitive = '(unknown)'
806 806 try:
807 807 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
808 808 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
809 809 except OSError:
810 810 pass
811 811 ui.write(('case-sensitive: %s\n') % casesensitive)
812 812
813 813 @command('debuggetbundle',
814 814 [('H', 'head', [], _('id of head node'), _('ID')),
815 815 ('C', 'common', [], _('id of common node'), _('ID')),
816 816 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
817 817 _('REPO FILE [-H|-C ID]...'),
818 818 norepo=True)
819 819 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
820 820 """retrieves a bundle from a repo
821 821
822 822 Every ID must be a full-length hex node id string. Saves the bundle to the
823 823 given file.
824 824 """
825 825 repo = hg.peer(ui, opts, repopath)
826 826 if not repo.capable('getbundle'):
827 827 raise error.Abort("getbundle() not supported by target repository")
828 828 args = {}
829 829 if common:
830 830 args['common'] = [bin(s) for s in common]
831 831 if head:
832 832 args['heads'] = [bin(s) for s in head]
833 833 # TODO: get desired bundlecaps from command line.
834 834 args['bundlecaps'] = None
835 835 bundle = repo.getbundle('debug', **args)
836 836
837 837 bundletype = opts.get('type', 'bzip2').lower()
838 838 btypes = {'none': 'HG10UN',
839 839 'bzip2': 'HG10BZ',
840 840 'gzip': 'HG10GZ',
841 841 'bundle2': 'HG20'}
842 842 bundletype = btypes.get(bundletype)
843 843 if bundletype not in bundle2.bundletypes:
844 844 raise error.Abort(_('unknown bundle type specified with --type'))
845 845 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
846 846
847 847 @command('debugignore', [], '[FILE]')
848 848 def debugignore(ui, repo, *files, **opts):
849 849 """display the combined ignore pattern and information about ignored files
850 850
851 851 With no argument display the combined ignore pattern.
852 852
853 853 Given space separated file names, shows if the given file is ignored and
854 854     if so, shows the ignore rule (file and line number) that matched it.
855 855 """
856 856 ignore = repo.dirstate._ignore
857 857 if not files:
858 858 # Show all the patterns
859 859 ui.write("%s\n" % repr(ignore))
860 860 else:
861 861 for f in files:
862 862 nf = util.normpath(f)
863 863 ignored = None
864 864 ignoredata = None
865 865 if nf != '.':
866 866 if ignore(nf):
867 867 ignored = nf
868 868 ignoredata = repo.dirstate._ignorefileandline(nf)
869 869 else:
870 870 for p in util.finddirs(nf):
871 871 if ignore(p):
872 872 ignored = p
873 873 ignoredata = repo.dirstate._ignorefileandline(p)
874 874 break
875 875 if ignored:
876 876 if ignored == nf:
877 877 ui.write(_("%s is ignored\n") % f)
878 878 else:
879 879 ui.write(_("%s is ignored because of "
880 880 "containing folder %s\n")
881 881 % (f, ignored))
882 882 ignorefile, lineno, line = ignoredata
883 883 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
884 884 % (ignorefile, lineno, line))
885 885 else:
886 886 ui.write(_("%s is not ignored\n") % f)
887 887
888 888 @command('debugindex', cmdutil.debugrevlogopts +
889 889 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
890 890 _('[-f FORMAT] -c|-m|FILE'),
891 891 optionalrepo=True)
892 892 def debugindex(ui, repo, file_=None, **opts):
893 893 """dump the contents of an index file"""
894 894 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
895 895 format = opts.get('format', 0)
896 896 if format not in (0, 1):
897 897 raise error.Abort(_("unknown format %d") % format)
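    # format 0 is the traditional listing; format 1 additionally shows the
    # revision flags, raw size and numeric parent revisions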
898 898
899 899 generaldelta = r.version & revlog.FLAG_GENERALDELTA
900 900 if generaldelta:
901 901 basehdr = ' delta'
902 902 else:
903 903 basehdr = ' base'
904 904
905 905 if ui.debugflag:
906 906 shortfn = hex
907 907 else:
908 908 shortfn = short
909 909
910 910 # There might not be anything in r, so have a sane default
911 911 idlen = 12
912 912 for i in r:
913 913 idlen = len(shortfn(r.node(i)))
914 914 break
915 915
916 916 if format == 0:
917 917 ui.write((" rev offset length " + basehdr + " linkrev"
918 918 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
919 919 elif format == 1:
920 920 ui.write((" rev flag offset length"
921 921 " size " + basehdr + " link p1 p2"
922 922 " %s\n") % "nodeid".rjust(idlen))
923 923
924 924 for i in r:
925 925 node = r.node(i)
926 926 if generaldelta:
927 927 base = r.deltaparent(i)
928 928 else:
929 929 base = r.chainbase(i)
930 930 if format == 0:
931 931 try:
932 932 pp = r.parents(node)
933 933 except Exception:
934 934 pp = [nullid, nullid]
935 935 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
936 936 i, r.start(i), r.length(i), base, r.linkrev(i),
937 937 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
938 938 elif format == 1:
939 939 pr = r.parentrevs(i)
940 940 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
941 941 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
942 942 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
943 943
944 944 @command('debugindexdot', cmdutil.debugrevlogopts,
945 945 _('-c|-m|FILE'), optionalrepo=True)
946 946 def debugindexdot(ui, repo, file_=None, **opts):
947 947 """dump an index DAG as a graphviz dot file"""
948 948 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
949 949 ui.write(("digraph G {\n"))
950 950 for i in r:
951 951 node = r.node(i)
952 952 pp = r.parents(node)
953 953 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
954 954 if pp[1] != nullid:
955 955 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
956 956 ui.write("}\n")
957 957
958 958 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
959 959 def debuginstall(ui, **opts):
960 960 '''test Mercurial installation
961 961
962 962 Returns 0 on success.
963 963 '''
964 964
965 965 def writetemp(contents):
966 966 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
967 967 f = os.fdopen(fd, pycompat.sysstr("wb"))
968 968 f.write(contents)
969 969 f.close()
970 970 return name
971 971
972 972 problems = 0
973 973
974 974 fm = ui.formatter('debuginstall', opts)
975 975 fm.startitem()
976 976
977 977 # encoding
978 978 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
979 979 err = None
980 980 try:
981 981 encoding.fromlocal("test")
982 982 except error.Abort as inst:
983 983 err = inst
984 984 problems += 1
985 985 fm.condwrite(err, 'encodingerror', _(" %s\n"
986 986 " (check that your locale is properly set)\n"), err)
987 987
988 988 # Python
989 989 fm.write('pythonexe', _("checking Python executable (%s)\n"),
990 990 pycompat.sysexecutable)
991 991 fm.write('pythonver', _("checking Python version (%s)\n"),
992 992 ("%d.%d.%d" % sys.version_info[:3]))
993 993 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
994 994 os.path.dirname(pycompat.fsencode(os.__file__)))
995 995
996 996 security = set(sslutil.supportedprotocols)
997 997 if sslutil.hassni:
998 998 security.add('sni')
999 999
1000 1000 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1001 1001 fm.formatlist(sorted(security), name='protocol',
1002 1002 fmt='%s', sep=','))
1003 1003
1004 1004 # These are warnings, not errors. So don't increment problem count. This
1005 1005 # may change in the future.
1006 1006 if 'tls1.2' not in security:
1007 1007 fm.plain(_(' TLS 1.2 not supported by Python install; '
1008 1008 'network connections lack modern security\n'))
1009 1009 if 'sni' not in security:
1010 1010 fm.plain(_(' SNI not supported by Python install; may have '
1011 1011 'connectivity issues with some servers\n'))
1012 1012
1013 1013 # TODO print CA cert info
1014 1014
1015 1015 # hg version
1016 1016 hgver = util.version()
1017 1017 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1018 1018 hgver.split('+')[0])
1019 1019 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1020 1020 '+'.join(hgver.split('+')[1:]))
1021 1021
1022 1022 # compiled modules
1023 1023 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1024 1024 policy.policy)
1025 1025 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1026 1026 os.path.dirname(pycompat.fsencode(__file__)))
1027 1027
1028 1028 if policy.policy in ('c', 'allow'):
1029 1029 err = None
1030 1030 try:
1031 1031 from .cext import (
1032 1032 base85,
1033 1033 bdiff,
1034 1034 mpatch,
1035 1035 osutil,
1036 1036 )
1037 1037 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1038 1038 except Exception as inst:
1039 1039 err = inst
1040 1040 problems += 1
1041 1041 fm.condwrite(err, 'extensionserror', " %s\n", err)
1042 1042
1043 1043 compengines = util.compengines._engines.values()
1044 1044 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1045 1045 fm.formatlist(sorted(e.name() for e in compengines),
1046 1046 name='compengine', fmt='%s', sep=', '))
1047 1047 fm.write('compenginesavail', _('checking available compression engines '
1048 1048 '(%s)\n'),
1049 1049 fm.formatlist(sorted(e.name() for e in compengines
1050 1050 if e.available()),
1051 1051 name='compengine', fmt='%s', sep=', '))
1052 1052 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1053 1053 fm.write('compenginesserver', _('checking available compression engines '
1054 1054 'for wire protocol (%s)\n'),
1055 1055 fm.formatlist([e.name() for e in wirecompengines
1056 1056 if e.wireprotosupport()],
1057 1057 name='compengine', fmt='%s', sep=', '))
1058 1058
1059 1059 # templates
1060 1060 p = templater.templatepaths()
1061 1061 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1062 1062 fm.condwrite(not p, '', _(" no template directories found\n"))
1063 1063 if p:
1064 1064 m = templater.templatepath("map-cmdline.default")
1065 1065 if m:
1066 1066 # template found, check if it is working
1067 1067 err = None
1068 1068 try:
1069 1069 templater.templater.frommapfile(m)
1070 1070 except Exception as inst:
1071 1071 err = inst
1072 1072 p = None
1073 1073 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1074 1074 else:
1075 1075 p = None
1076 1076 fm.condwrite(p, 'defaulttemplate',
1077 1077 _("checking default template (%s)\n"), m)
1078 1078 fm.condwrite(not m, 'defaulttemplatenotfound',
1079 1079 _(" template '%s' not found\n"), "default")
1080 1080 if not p:
1081 1081 problems += 1
1082 1082 fm.condwrite(not p, '',
1083 1083 _(" (templates seem to have been installed incorrectly)\n"))
1084 1084
1085 1085 # editor
1086 1086 editor = ui.geteditor()
1087 1087 editor = util.expandpath(editor)
1088 1088 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1089 1089 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1090 1090 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1091 1091 _(" No commit editor set and can't find %s in PATH\n"
1092 1092 " (specify a commit editor in your configuration"
1093 1093 " file)\n"), not cmdpath and editor == 'vi' and editor)
1094 1094 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1095 1095 _(" Can't find editor '%s' in PATH\n"
1096 1096 " (specify a commit editor in your configuration"
1097 1097 " file)\n"), not cmdpath and editor)
1098 1098 if not cmdpath and editor != 'vi':
1099 1099 problems += 1
1100 1100
1101 1101 # check username
1102 1102 username = None
1103 1103 err = None
1104 1104 try:
1105 1105 username = ui.username()
1106 1106 except error.Abort as e:
1107 1107 err = e
1108 1108 problems += 1
1109 1109
1110 1110 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1111 1111 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1112 1112 " (specify a username in your configuration file)\n"), err)
1113 1113
1114 1114 fm.condwrite(not problems, '',
1115 1115 _("no problems detected\n"))
1116 1116 if not problems:
1117 1117 fm.data(problems=problems)
1118 1118 fm.condwrite(problems, 'problems',
1119 1119 _("%d problems detected,"
1120 1120 " please check your install!\n"), problems)
1121 1121 fm.end()
1122 1122
1123 1123 return problems
1124 1124
1125 1125 @command('debugknown', [], _('REPO ID...'), norepo=True)
1126 1126 def debugknown(ui, repopath, *ids, **opts):
1127 1127 """test whether node ids are known to a repo
1128 1128
1129 1129 Every ID must be a full-length hex node id string. Returns a list of 0s
1130 1130 and 1s indicating unknown/known.
1131 1131 """
1132 1132 repo = hg.peer(ui, opts, repopath)
1133 1133 if not repo.capable('known'):
1134 1134 raise error.Abort("known() not supported by target repository")
1135 1135 flags = repo.known([bin(s) for s in ids])
1136 1136 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1137 1137
1138 1138 @command('debuglabelcomplete', [], _('LABEL...'))
1139 1139 def debuglabelcomplete(ui, repo, *args):
1140 1140 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1141 1141 debugnamecomplete(ui, repo, *args)
1142 1142
1143 1143 @command('debuglocks',
1144 1144 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1145 1145 ('W', 'force-wlock', None,
1146 1146 _('free the working state lock (DANGEROUS)'))],
1147 1147 _('[OPTION]...'))
1148 1148 def debuglocks(ui, repo, **opts):
1149 1149 """show or modify state of locks
1150 1150
1151 1151 By default, this command will show which locks are held. This
1152 1152 includes the user and process holding the lock, the amount of time
1153 1153 the lock has been held, and the machine name where the process is
1154 1154 running if it's not local.
1155 1155
1156 1156 Locks protect the integrity of Mercurial's data, so should be
1157 1157 treated with care. System crashes or other interruptions may cause
1158 1158 locks to not be properly released, though Mercurial will usually
1159 1159 detect and remove such stale locks automatically.
1160 1160
1161 1161 However, detecting stale locks may not always be possible (for
1162 1162 instance, on a shared filesystem). Removing locks may also be
1163 1163 blocked by filesystem permissions.
1164 1164
1165 1165 Returns 0 if no locks are held.
1166 1166
1167 1167 """
1168 1168
1169 1169 if opts.get('force_lock'):
1170 1170 repo.svfs.unlink('lock')
1171 1171 if opts.get('force_wlock'):
1172 1172 repo.vfs.unlink('wlock')
1173 1173     if opts.get('force_lock') or opts.get('force_wlock'):
1174 1174 return 0
1175 1175
1176 1176 now = time.time()
1177 1177 held = 0
1178 1178
1179 1179 def report(vfs, name, method):
1180 1180 # this causes stale locks to get reaped for more accurate reporting
1181 1181 try:
1182 1182 l = method(False)
1183 1183 except error.LockHeld:
1184 1184 l = None
1185 1185
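        # if the non-blocking acquire succeeded the lock was free (release it
        # again right away); on LockHeld we fall through and report the holder
        # recorded in the lock file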
1186 1186 if l:
1187 1187 l.release()
1188 1188 else:
1189 1189 try:
1190 1190 stat = vfs.lstat(name)
1191 1191 age = now - stat.st_mtime
1192 1192 user = util.username(stat.st_uid)
1193 1193 locker = vfs.readlock(name)
1194 1194 if ":" in locker:
1195 1195 host, pid = locker.split(':')
1196 1196 if host == socket.gethostname():
1197 1197 locker = 'user %s, process %s' % (user, pid)
1198 1198 else:
1199 1199 locker = 'user %s, process %s, host %s' \
1200 1200 % (user, pid, host)
1201 1201 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1202 1202 return 1
1203 1203 except OSError as e:
1204 1204 if e.errno != errno.ENOENT:
1205 1205 raise
1206 1206
1207 1207 ui.write(("%-6s free\n") % (name + ":"))
1208 1208 return 0
1209 1209
1210 1210 held += report(repo.svfs, "lock", repo.lock)
1211 1211 held += report(repo.vfs, "wlock", repo.wlock)
1212 1212
1213 1213 return held
1214 1214
1215 1215 @command('debugmergestate', [], '')
1216 1216 def debugmergestate(ui, repo, *args):
1217 1217 """print merge state
1218 1218
1219 1219 Use --verbose to print out information about whether v1 or v2 merge state
1220 1220 was chosen."""
1221 1221 def _hashornull(h):
1222 1222 if h == nullhex:
1223 1223 return 'null'
1224 1224 else:
1225 1225 return h
1226 1226
1227 1227 def printrecords(version):
1228 1228 ui.write(('* version %s records\n') % version)
1229 1229 if version == 1:
1230 1230 records = v1records
1231 1231 else:
1232 1232 records = v2records
1233 1233
1234 1234 for rtype, record in records:
1235 1235 # pretty print some record types
1236 1236 if rtype == 'L':
1237 1237 ui.write(('local: %s\n') % record)
1238 1238 elif rtype == 'O':
1239 1239 ui.write(('other: %s\n') % record)
1240 1240 elif rtype == 'm':
1241 1241 driver, mdstate = record.split('\0', 1)
1242 1242 ui.write(('merge driver: %s (state "%s")\n')
1243 1243 % (driver, mdstate))
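            # 'F' is a regular file merge, 'D' a merge-driver resolved file and
            # 'C' a change/delete conflict; all three share the record layout
            # unpacked below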
1244 1244 elif rtype in 'FDC':
1245 1245 r = record.split('\0')
1246 1246 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1247 1247 if version == 1:
1248 1248 onode = 'not stored in v1 format'
1249 1249 flags = r[7]
1250 1250 else:
1251 1251 onode, flags = r[7:9]
1252 1252 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1253 1253 % (f, rtype, state, _hashornull(hash)))
1254 1254 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1255 1255 ui.write((' ancestor path: %s (node %s)\n')
1256 1256 % (afile, _hashornull(anode)))
1257 1257 ui.write((' other path: %s (node %s)\n')
1258 1258 % (ofile, _hashornull(onode)))
1259 1259 elif rtype == 'f':
1260 1260 filename, rawextras = record.split('\0', 1)
1261 1261 extras = rawextras.split('\0')
1262 1262 i = 0
1263 1263 extrastrings = []
1264 1264 while i < len(extras):
1265 1265 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1266 1266 i += 2
1267 1267
1268 1268 ui.write(('file extras: %s (%s)\n')
1269 1269 % (filename, ', '.join(extrastrings)))
1270 1270 elif rtype == 'l':
1271 1271 labels = record.split('\0', 2)
1272 1272 labels = [l for l in labels if len(l) > 0]
1273 1273 ui.write(('labels:\n'))
1274 1274 ui.write((' local: %s\n' % labels[0]))
1275 1275 ui.write((' other: %s\n' % labels[1]))
1276 1276 if len(labels) > 2:
1277 1277 ui.write((' base: %s\n' % labels[2]))
1278 1278 else:
1279 1279 ui.write(('unrecognized entry: %s\t%s\n')
1280 1280 % (rtype, record.replace('\0', '\t')))
1281 1281
1282 1282 # Avoid mergestate.read() since it may raise an exception for unsupported
1283 1283 # merge state records. We shouldn't be doing this, but this is OK since this
1284 1284 # command is pretty low-level.
1285 1285 ms = mergemod.mergestate(repo)
1286 1286
1287 1287 # sort so that reasonable information is on top
1288 1288 v1records = ms._readrecordsv1()
1289 1289 v2records = ms._readrecordsv2()
1290 1290 order = 'LOml'
1291 1291 def key(r):
1292 1292 idx = order.find(r[0])
1293 1293 if idx == -1:
1294 1294 return (1, r[1])
1295 1295 else:
1296 1296 return (0, idx)
1297 1297 v1records.sort(key=key)
1298 1298 v2records.sort(key=key)
1299 1299
1300 1300 if not v1records and not v2records:
1301 1301 ui.write(('no merge state found\n'))
1302 1302 elif not v2records:
1303 1303 ui.note(('no version 2 merge state\n'))
1304 1304 printrecords(1)
1305 1305 elif ms._v1v2match(v1records, v2records):
1306 1306 ui.note(('v1 and v2 states match: using v2\n'))
1307 1307 printrecords(2)
1308 1308 else:
1309 1309 ui.note(('v1 and v2 states mismatch: using v1\n'))
1310 1310 printrecords(1)
1311 1311 if ui.verbose:
1312 1312 printrecords(2)
1313 1313
1314 1314 @command('debugnamecomplete', [], _('NAME...'))
1315 1315 def debugnamecomplete(ui, repo, *args):
1316 1316 '''complete "names" - tags, open branch names, bookmark names'''
1317 1317
1318 1318 names = set()
1319 1319 # since we previously only listed open branches, we will handle that
1320 1320 # specially (after this for loop)
1321 1321 for name, ns in repo.names.iteritems():
1322 1322 if name != 'branches':
1323 1323 names.update(ns.listnames(repo))
1324 1324 names.update(tag for (tag, heads, tip, closed)
1325 1325 in repo.branchmap().iterbranches() if not closed)
1326 1326 completions = set()
1327 1327 if not args:
1328 1328 args = ['']
1329 1329 for a in args:
1330 1330 completions.update(n for n in names if n.startswith(a))
1331 1331 ui.write('\n'.join(sorted(completions)))
1332 1332 ui.write('\n')
1333 1333
1334 1334 @command('debugobsolete',
1335 1335 [('', 'flags', 0, _('markers flag')),
1336 1336 ('', 'record-parents', False,
1337 1337 _('record parent information for the precursor')),
1338 1338 ('r', 'rev', [], _('display markers relevant to REV')),
1339 1339 ('', 'exclusive', False, _('restrict display to markers only '
1340 1340 'relevant to REV')),
1341 1341 ('', 'index', False, _('display index of the marker')),
1342 1342 ('', 'delete', [], _('delete markers specified by indices')),
1343 1343 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1344 1344 _('[OBSOLETED [REPLACEMENT ...]]'))
1345 1345 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1346 1346 """create arbitrary obsolete marker
1347 1347
1348 1348 With no arguments, displays the list of obsolescence markers."""
1349 1349
1350 1350 def parsenodeid(s):
1351 1351 try:
1352 1352 # We do not use revsingle/revrange functions here to accept
1353 1353 # arbitrary node identifiers, possibly not present in the
1354 1354 # local repository.
1355 1355 n = bin(s)
1356 1356 if len(n) != len(nullid):
1357 1357 raise TypeError()
1358 1358 return n
1359 1359 except TypeError:
1360 1360 raise error.Abort('changeset references must be full hexadecimal '
1361 1361 'node identifiers')
1362 1362
1363 1363 if opts.get('delete'):
1364 1364 indices = []
1365 1365 for v in opts.get('delete'):
1366 1366 try:
1367 1367 indices.append(int(v))
1368 1368 except ValueError:
1369 1369 raise error.Abort(_('invalid index value: %r') % v,
1370 1370 hint=_('use integers for indices'))
1371 1371
1372 1372 if repo.currenttransaction():
1373 1373 raise error.Abort(_('cannot delete obsmarkers in the middle '
1374 1374 'of transaction.'))
1375 1375
1376 1376 with repo.lock():
1377 1377 n = repair.deleteobsmarkers(repo.obsstore, indices)
1378 1378 ui.write(_('deleted %i obsolescence markers\n') % n)
1379 1379
1380 1380 return
1381 1381
1382 1382 if precursor is not None:
1383 1383 if opts['rev']:
1384 1384 raise error.Abort('cannot select revision when creating marker')
1385 1385 metadata = {}
1386 1386 metadata['user'] = opts['user'] or ui.username()
1387 1387 succs = tuple(parsenodeid(succ) for succ in successors)
1388 1388 l = repo.lock()
1389 1389 try:
1390 1390 tr = repo.transaction('debugobsolete')
1391 1391 try:
1392 1392 date = opts.get('date')
1393 1393 if date:
1394 1394 date = util.parsedate(date)
1395 1395 else:
1396 1396 date = None
1397 1397 prec = parsenodeid(precursor)
1398 1398 parents = None
1399 1399 if opts['record_parents']:
1400 1400 if prec not in repo.unfiltered():
1401 1401                     raise error.Abort('cannot use --record-parents on '
1402 1402 'unknown changesets')
1403 1403 parents = repo.unfiltered()[prec].parents()
1404 1404 parents = tuple(p.node() for p in parents)
1405 1405 repo.obsstore.create(tr, prec, succs, opts['flags'],
1406 1406 parents=parents, date=date,
1407 1407 metadata=metadata, ui=ui)
1408 1408 tr.close()
1409 1409 except ValueError as exc:
1410 1410 raise error.Abort(_('bad obsmarker input: %s') % exc)
1411 1411 finally:
1412 1412 tr.release()
1413 1413 finally:
1414 1414 l.release()
1415 1415 else:
1416 1416 if opts['rev']:
1417 1417 revs = scmutil.revrange(repo, opts['rev'])
1418 1418 nodes = [repo[r].node() for r in revs]
1419 1419 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1420 1420 exclusive=opts['exclusive']))
1421 1421 markers.sort(key=lambda x: x._data)
1422 1422 else:
1423 1423 markers = obsolete.getmarkers(repo)
1424 1424
1425 1425 markerstoiter = markers
1426 1426 isrelevant = lambda m: True
1427 1427 if opts.get('rev') and opts.get('index'):
1428 1428 markerstoiter = obsolete.getmarkers(repo)
1429 1429 markerset = set(markers)
1430 1430 isrelevant = lambda m: m in markerset
1431 1431
1432 1432 fm = ui.formatter('debugobsolete', opts)
1433 1433 for i, m in enumerate(markerstoiter):
1434 1434 if not isrelevant(m):
1435 1435 # marker can be irrelevant when we're iterating over a set
1436 1436 # of markers (markerstoiter) which is bigger than the set
1437 1437 # of markers we want to display (markers)
1438 1438 # this can happen if both --index and --rev options are
1439 1439 # provided and thus we need to iterate over all of the markers
1440 1440 # to get the correct indices, but only display the ones that
1441 1441 # are relevant to --rev value
1442 1442 continue
1443 1443 fm.startitem()
1444 1444 ind = i if opts.get('index') else None
1445 1445 cmdutil.showmarker(fm, m, index=ind)
1446 1446 fm.end()
1447 1447
1448 1448 @command('debugpathcomplete',
1449 1449 [('f', 'full', None, _('complete an entire path')),
1450 1450 ('n', 'normal', None, _('show only normal files')),
1451 1451 ('a', 'added', None, _('show only added files')),
1452 1452 ('r', 'removed', None, _('show only removed files'))],
1453 1453 _('FILESPEC...'))
1454 1454 def debugpathcomplete(ui, repo, *specs, **opts):
1455 1455 '''complete part or all of a tracked path
1456 1456
1457 1457 This command supports shells that offer path name completion. It
1458 1458 currently completes only files already known to the dirstate.
1459 1459
1460 1460 Completion extends only to the next path segment unless
1461 1461 --full is specified, in which case entire paths are used.'''
1462 1462
1463 1463 def complete(path, acceptable):
1464 1464 dirstate = repo.dirstate
1465 1465 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1466 1466 rootdir = repo.root + pycompat.ossep
1467 1467 if spec != repo.root and not spec.startswith(rootdir):
1468 1468 return [], []
1469 1469 if os.path.isdir(spec):
1470 1470 spec += '/'
1471 1471 spec = spec[len(rootdir):]
1472 1472 fixpaths = pycompat.ossep != '/'
1473 1473 if fixpaths:
1474 1474 spec = spec.replace(pycompat.ossep, '/')
1475 1475 speclen = len(spec)
1476 1476 fullpaths = opts['full']
1477 1477 files, dirs = set(), set()
1478 1478 adddir, addfile = dirs.add, files.add
1479 1479 for f, st in dirstate.iteritems():
1480 1480 if f.startswith(spec) and st[0] in acceptable:
1481 1481 if fixpaths:
1482 1482 f = f.replace('/', pycompat.ossep)
1483 1483 if fullpaths:
1484 1484 addfile(f)
1485 1485 continue
1486 1486 s = f.find(pycompat.ossep, speclen)
1487 1487 if s >= 0:
1488 1488 adddir(f[:s])
1489 1489 else:
1490 1490 addfile(f)
1491 1491 return files, dirs
1492 1492
1493 1493 acceptable = ''
1494 1494 if opts['normal']:
1495 1495 acceptable += 'nm'
1496 1496 if opts['added']:
1497 1497 acceptable += 'a'
1498 1498 if opts['removed']:
1499 1499 acceptable += 'r'
1500 1500 cwd = repo.getcwd()
1501 1501 if not specs:
1502 1502 specs = ['.']
1503 1503
1504 1504 files, dirs = set(), set()
1505 1505 for spec in specs:
1506 1506 f, d = complete(spec, acceptable or 'nmar')
1507 1507 files.update(f)
1508 1508 dirs.update(d)
1509 1509 files.update(dirs)
1510 1510 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1511 1511 ui.write('\n')
1512 1512
1513 1513 @command('debugpickmergetool',
1514 1514 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1515 1515 ('', 'changedelete', None, _('emulate merging change and delete')),
1516 1516 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1517 1517 _('[PATTERN]...'),
1518 1518 inferrepo=True)
1519 1519 def debugpickmergetool(ui, repo, *pats, **opts):
1520 1520     """examine which merge tool is chosen for the specified file
1521 1521
1522 1522 As described in :hg:`help merge-tools`, Mercurial examines
1523 1523 configurations below in this order to decide which merge tool is
1524 1524     chosen for the specified file.
1525 1525
1526 1526 1. ``--tool`` option
1527 1527 2. ``HGMERGE`` environment variable
1528 1528 3. configurations in ``merge-patterns`` section
1529 1529 4. configuration of ``ui.merge``
1530 1530 5. configurations in ``merge-tools`` section
1531 1531 6. ``hgmerge`` tool (for historical reason only)
1532 1532     6. ``hgmerge`` tool (for historical reasons only)
1533 1533
1534 1534 This command writes out the examination result in the style below::
1535 1535
1536 1536 FILE = MERGETOOL
1537 1537
1538 1538 By default, all files known in the first parent context of the
1539 1539 working directory are examined. Use file patterns and/or -I/-X
1540 1540 options to limit target files. -r/--rev is also useful to examine
1541 1541 files in another context without actually updating to it.
1542 1542
1543 1543 With --debug, this command also shows warning messages emitted while
1544 1544 matching against ``merge-patterns`` and so on. It is recommended to
1545 1545 use this option with explicit file patterns and/or -I/-X options,
1546 1546 because this option increases the amount of output per file according
1547 1547 to the configuration in hgrc.
1548 1548
1549 1549 With -v/--verbose, this command first shows the configurations
1550 1550 below (only those that are actually specified).
1551 1551
1552 1552 - ``--tool`` option
1553 1553 - ``HGMERGE`` environment variable
1554 1554 - configuration of ``ui.merge``
1555 1555
1556 1556 If a merge tool is chosen before matching against
1557 1557 ``merge-patterns``, this command can't show any helpful
1558 1558 information, even with --debug. In such a case, the information
1559 1559 above is useful for understanding why a merge tool was chosen.
1560 1560 """
1561 1561 overrides = {}
1562 1562 if opts['tool']:
1563 1563 overrides[('ui', 'forcemerge')] = opts['tool']
1564 1564 ui.note(('with --tool %r\n') % (opts['tool']))
1565 1565
1566 1566 with ui.configoverride(overrides, 'debugmergepatterns'):
1567 1567 hgmerge = encoding.environ.get("HGMERGE")
1568 1568 if hgmerge is not None:
1569 1569 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1570 1570 uimerge = ui.config("ui", "merge")
1571 1571 if uimerge:
1572 1572 ui.note(('with ui.merge=%r\n') % (uimerge))
1573 1573
1574 1574 ctx = scmutil.revsingle(repo, opts.get('rev'))
1575 1575 m = scmutil.match(ctx, pats, opts)
1576 1576 changedelete = opts['changedelete']
1577 1577 for path in ctx.walk(m):
1578 1578 fctx = ctx[path]
1579 1579 try:
1580 1580 if not ui.debugflag:
1581 1581 ui.pushbuffer(error=True)
1582 1582 tool, toolpath = filemerge._picktool(repo, ui, path,
1583 1583 fctx.isbinary(),
1584 1584 'l' in fctx.flags(),
1585 1585 changedelete)
1586 1586 finally:
1587 1587 if not ui.debugflag:
1588 1588 ui.popbuffer()
1589 1589 ui.write(('%s = %s\n') % (path, tool))
1590 1590
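A hypothetical session (file names and chosen tools are illustrative only); each matched file is reported as ``FILE = MERGETOOL``, and --tool forces the choice the same way it would during a real merge:

  $ hg debugpickmergetool -r . 'glob:**.txt'
  notes.txt = :merge
  $ hg debugpickmergetool --tool :merge3 notes.txt
  notes.txt = :merge3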
1591 1591 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1592 1592 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1593 1593 '''access the pushkey key/value protocol
1594 1594
1595 1595 With two args, list the keys in the given namespace.
1596 1596
1597 1597 With five args, set a key to new if it currently is set to old.
1598 1598 Reports success or failure.
1599 1599 '''
1600 1600
1601 1601 target = hg.peer(ui, {}, repopath)
1602 1602 if keyinfo:
1603 1603 key, old, new = keyinfo
1604 1604 r = target.pushkey(namespace, key, old, new)
1605 1605 ui.status(str(r) + '\n')
1606 1606 return not r
1607 1607 else:
1608 1608 for k, v in sorted(target.listkeys(namespace).iteritems()):
1609 1609 ui.write("%s\t%s\n" % (util.escapestr(k),
1610 1610 util.escapestr(v)))
1611 1611
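Illustrative invocations (the peer path, key name, and the OLDNODE/NEWNODE placeholders are hypothetical): the two-argument form lists a namespace, and the five-argument form conditionally updates a key.

  $ hg debugpushkey ../remote bookmarks
  $ hg debugpushkey ../remote bookmarks mybook OLDNODE NEWNODE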
1612 1612 @command('debugpvec', [], _('A B'))
1613 1613 def debugpvec(ui, repo, a, b=None):
1614 1614 ca = scmutil.revsingle(repo, a)
1615 1615 cb = scmutil.revsingle(repo, b)
1616 1616 pa = pvec.ctxpvec(ca)
1617 1617 pb = pvec.ctxpvec(cb)
1618 1618 if pa == pb:
1619 1619 rel = "="
1620 1620 elif pa > pb:
1621 1621 rel = ">"
1622 1622 elif pa < pb:
1623 1623 rel = "<"
1624 1624 elif pa | pb:
1625 1625 rel = "|"
1626 1626 ui.write(_("a: %s\n") % pa)
1627 1627 ui.write(_("b: %s\n") % pb)
1628 1628 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1629 1629 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1630 1630 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1631 1631 pa.distance(pb), rel))
1632 1632
1633 1633 @command('debugrebuilddirstate|debugrebuildstate',
1634 1634 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1635 1635 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1636 1636 'the working copy parent')),
1637 1637 ],
1638 1638 _('[-r REV]'))
1639 1639 def debugrebuilddirstate(ui, repo, rev, **opts):
1640 1640 """rebuild the dirstate as it would look like for the given revision
1641 1641
1642 1642 If no revision is specified, the first parent of the working directory will be used.
1643 1643
1644 1644 The dirstate will be set to the files of the given revision.
1645 1645 The actual working directory content or existing dirstate
1646 1646 information such as adds or removes is not considered.
1647 1647
1648 1648 ``minimal`` will only rebuild the dirstate status for files that claim to be
1649 1649 tracked but are not in the parent manifest, or that exist in the parent
1650 1650 manifest but are not in the dirstate. It will not change adds, removes, or
1651 1651 modified files that are in the working copy parent.
1652 1652
1653 1653 One use of this command is to make the next :hg:`status` invocation
1654 1654 check the actual file content.
1655 1655 """
1656 1656 ctx = scmutil.revsingle(repo, rev)
1657 1657 with repo.wlock():
1658 1658 dirstate = repo.dirstate
1659 1659 changedfiles = None
1660 1660 # See command doc for what minimal does.
1661 1661 if opts.get('minimal'):
1662 1662 manifestfiles = set(ctx.manifest().keys())
1663 1663 dirstatefiles = set(dirstate)
1664 1664 manifestonly = manifestfiles - dirstatefiles
1665 1665 dsonly = dirstatefiles - manifestfiles
1666 1666 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1667 1667 changedfiles = manifestonly | dsnotadded
1668 1668
1669 1669 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1670 1670
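A usage sketch: rebuild the dirstate from the working directory's first parent, or repair only inconsistent entries with --minimal.

  $ hg debugrebuilddirstate -r .
  $ hg debugrebuilddirstate --minimal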
1671 1671 @command('debugrebuildfncache', [], '')
1672 1672 def debugrebuildfncache(ui, repo):
1673 1673 """rebuild the fncache file"""
1674 1674 repair.rebuildfncache(ui, repo)
1675 1675
1676 1676 @command('debugrename',
1677 1677 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1678 1678 _('[-r REV] FILE'))
1679 1679 def debugrename(ui, repo, file1, *pats, **opts):
1680 1680 """dump rename information"""
1681 1681
1682 1682 ctx = scmutil.revsingle(repo, opts.get('rev'))
1683 1683 m = scmutil.match(ctx, (file1,) + pats, opts)
1684 1684 for abs in ctx.walk(m):
1685 1685 fctx = ctx[abs]
1686 1686 o = fctx.filelog().renamed(fctx.filenode())
1687 1687 rel = m.rel(abs)
1688 1688 if o:
1689 1689 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1690 1690 else:
1691 1691 ui.write(_("%s not renamed\n") % rel)
1692 1692
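For example (file name invented for illustration), rename information can be checked against the working copy parent or any other revision:

  $ hg debugrename b.txt
  $ hg debugrename -r tip b.txt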
1693 1693 @command('debugrevlog', cmdutil.debugrevlogopts +
1694 1694 [('d', 'dump', False, _('dump index data'))],
1695 1695 _('-c|-m|FILE'),
1696 1696 optionalrepo=True)
1697 1697 def debugrevlog(ui, repo, file_=None, **opts):
1698 1698 """show data and statistics about a revlog"""
1699 1699 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1700 1700
1701 1701 if opts.get("dump"):
1702 1702 numrevs = len(r)
1703 1703 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1704 1704 " rawsize totalsize compression heads chainlen\n"))
1705 1705 ts = 0
1706 1706 heads = set()
1707 1707
1708 1708 for rev in xrange(numrevs):
1709 1709 dbase = r.deltaparent(rev)
1710 1710 if dbase == -1:
1711 1711 dbase = rev
1712 1712 cbase = r.chainbase(rev)
1713 1713 clen = r.chainlen(rev)
1714 1714 p1, p2 = r.parentrevs(rev)
1715 1715 rs = r.rawsize(rev)
1716 1716 ts = ts + rs
1717 1717 heads -= set(r.parentrevs(rev))
1718 1718 heads.add(rev)
1719 1719 try:
1720 1720 compression = ts / r.end(rev)
1721 1721 except ZeroDivisionError:
1722 1722 compression = 0
1723 1723 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1724 1724 "%11d %5d %8d\n" %
1725 1725 (rev, p1, p2, r.start(rev), r.end(rev),
1726 1726 r.start(dbase), r.start(cbase),
1727 1727 r.start(p1), r.start(p2),
1728 1728 rs, ts, compression, len(heads), clen))
1729 1729 return 0
1730 1730
1731 1731 v = r.version
1732 1732 format = v & 0xFFFF
1733 1733 flags = []
1734 1734 gdelta = False
1735 1735 if v & revlog.FLAG_INLINE_DATA:
1736 1736 flags.append('inline')
1737 1737 if v & revlog.FLAG_GENERALDELTA:
1738 1738 gdelta = True
1739 1739 flags.append('generaldelta')
1740 1740 if not flags:
1741 1741 flags = ['(none)']
1742 1742
1743 1743 nummerges = 0
1744 1744 numfull = 0
1745 1745 numprev = 0
1746 1746 nump1 = 0
1747 1747 nump2 = 0
1748 1748 numother = 0
1749 1749 nump1prev = 0
1750 1750 nump2prev = 0
1751 1751 chainlengths = []
1752 1752 chainbases = []
1753 1753 chainspans = []
1754 1754
1755 1755 datasize = [None, 0, 0]
1756 1756 fullsize = [None, 0, 0]
1757 1757 deltasize = [None, 0, 0]
1758 1758 chunktypecounts = {}
1759 1759 chunktypesizes = {}
1760 1760
1761 1761 def addsize(size, l):
1762 1762 if l[0] is None or size < l[0]:
1763 1763 l[0] = size
1764 1764 if size > l[1]:
1765 1765 l[1] = size
1766 1766 l[2] += size
1767 1767
1768 1768 numrevs = len(r)
1769 1769 for rev in xrange(numrevs):
1770 1770 p1, p2 = r.parentrevs(rev)
1771 1771 delta = r.deltaparent(rev)
1772 1772 if format > 0:
1773 1773 addsize(r.rawsize(rev), datasize)
1774 1774 if p2 != nullrev:
1775 1775 nummerges += 1
1776 1776 size = r.length(rev)
1777 1777 if delta == nullrev:
1778 1778 chainlengths.append(0)
1779 1779 chainbases.append(r.start(rev))
1780 1780 chainspans.append(size)
1781 1781 numfull += 1
1782 1782 addsize(size, fullsize)
1783 1783 else:
1784 1784 chainlengths.append(chainlengths[delta] + 1)
1785 1785 baseaddr = chainbases[delta]
1786 1786 revaddr = r.start(rev)
1787 1787 chainbases.append(baseaddr)
1788 1788 chainspans.append((revaddr - baseaddr) + size)
1789 1789 addsize(size, deltasize)
1790 1790 if delta == rev - 1:
1791 1791 numprev += 1
1792 1792 if delta == p1:
1793 1793 nump1prev += 1
1794 1794 elif delta == p2:
1795 1795 nump2prev += 1
1796 1796 elif delta == p1:
1797 1797 nump1 += 1
1798 1798 elif delta == p2:
1799 1799 nump2 += 1
1800 1800 elif delta != nullrev:
1801 1801 numother += 1
1802 1802
1803 1803 # Obtain data on the raw chunks in the revlog.
1804 1804 segment = r._getsegmentforrevs(rev, rev)[1]
1805 1805 if segment:
1806 1806 chunktype = segment[0]
1807 1807 else:
1808 1808 chunktype = 'empty'
1809 1809
1810 1810 if chunktype not in chunktypecounts:
1811 1811 chunktypecounts[chunktype] = 0
1812 1812 chunktypesizes[chunktype] = 0
1813 1813
1814 1814 chunktypecounts[chunktype] += 1
1815 1815 chunktypesizes[chunktype] += size
1816 1816
1817 1817 # Adjust size min value for empty cases
1818 1818 for size in (datasize, fullsize, deltasize):
1819 1819 if size[0] is None:
1820 1820 size[0] = 0
1821 1821
1822 1822 numdeltas = numrevs - numfull
1823 1823 numoprev = numprev - nump1prev - nump2prev
1824 1824 totalrawsize = datasize[2]
1825 1825 datasize[2] /= numrevs
1826 1826 fulltotal = fullsize[2]
1827 1827 fullsize[2] /= numfull
1828 1828 deltatotal = deltasize[2]
1829 1829 if numrevs - numfull > 0:
1830 1830 deltasize[2] /= numrevs - numfull
1831 1831 totalsize = fulltotal + deltatotal
1832 1832 avgchainlen = sum(chainlengths) / numrevs
1833 1833 maxchainlen = max(chainlengths)
1834 1834 maxchainspan = max(chainspans)
1835 1835 compratio = 1
1836 1836 if totalsize:
1837 1837 compratio = totalrawsize / totalsize
1838 1838
1839 1839 basedfmtstr = '%%%dd\n'
1840 1840 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1841 1841
1842 1842 def dfmtstr(max):
1843 1843 return basedfmtstr % len(str(max))
1844 1844 def pcfmtstr(max, padding=0):
1845 1845 return basepcfmtstr % (len(str(max)), ' ' * padding)
1846 1846
1847 1847 def pcfmt(value, total):
1848 1848 if total:
1849 1849 return (value, 100 * float(value) / total)
1850 1850 else:
1851 1851 return value, 100.0
1852 1852
1853 1853 ui.write(('format : %d\n') % format)
1854 1854 ui.write(('flags : %s\n') % ', '.join(flags))
1855 1855
1856 1856 ui.write('\n')
1857 1857 fmt = pcfmtstr(totalsize)
1858 1858 fmt2 = dfmtstr(totalsize)
1859 1859 ui.write(('revisions : ') + fmt2 % numrevs)
1860 1860 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1861 1861 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1862 1862 ui.write(('revisions : ') + fmt2 % numrevs)
1863 1863 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1864 1864 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1865 1865 ui.write(('revision size : ') + fmt2 % totalsize)
1866 1866 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1867 1867 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1868 1868
1869 1869 def fmtchunktype(chunktype):
1870 1870 if chunktype == 'empty':
1871 1871 return ' %s : ' % chunktype
1872 1872 elif chunktype in string.ascii_letters:
1873 1873 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1874 1874 else:
1875 1875 return ' 0x%s : ' % hex(chunktype)
1876 1876
1877 1877 ui.write('\n')
1878 1878 ui.write(('chunks : ') + fmt2 % numrevs)
1879 1879 for chunktype in sorted(chunktypecounts):
1880 1880 ui.write(fmtchunktype(chunktype))
1881 1881 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1882 1882 ui.write(('chunks size : ') + fmt2 % totalsize)
1883 1883 for chunktype in sorted(chunktypecounts):
1884 1884 ui.write(fmtchunktype(chunktype))
1885 1885 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1886 1886
1887 1887 ui.write('\n')
1888 fmt = dfmtstr(max(avgchainlen, compratio))
1888 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1889 1889 ui.write(('avg chain length : ') + fmt % avgchainlen)
1890 1890 ui.write(('max chain length : ') + fmt % maxchainlen)
1891 ui.write(('max chain reach : ') + fmt % maxchainspan)
1891 ui.write(('max chain reach : ') + fmt % maxchainspan)
1892 1892 ui.write(('compression ratio : ') + fmt % compratio)
1893 1893
1894 1894 if format > 0:
1895 1895 ui.write('\n')
1896 1896 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1897 1897 % tuple(datasize))
1898 1898 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1899 1899 % tuple(fullsize))
1900 1900 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1901 1901 % tuple(deltasize))
1902 1902
1903 1903 if numdeltas > 0:
1904 1904 ui.write('\n')
1905 1905 fmt = pcfmtstr(numdeltas)
1906 1906 fmt2 = pcfmtstr(numdeltas, 4)
1907 1907 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1908 1908 if numprev > 0:
1909 1909 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1910 1910 numprev))
1911 1911 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1912 1912 numprev))
1913 1913 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1914 1914 numprev))
1915 1915 if gdelta:
1916 1916 ui.write(('deltas against p1 : ')
1917 1917 + fmt % pcfmt(nump1, numdeltas))
1918 1918 ui.write(('deltas against p2 : ')
1919 1919 + fmt % pcfmt(nump2, numdeltas))
1920 1920 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1921 1921 numdeltas))
1922 1922
1923 1923 @command('debugrevspec',
1924 1924 [('', 'optimize', None,
1925 1925 _('print parsed tree after optimizing (DEPRECATED)')),
1926 1926 ('', 'show-revs', True, _('print list of result revisions (default)')),
1927 1927 ('s', 'show-set', None, _('print internal representation of result set')),
1928 1928 ('p', 'show-stage', [],
1929 1929 _('print parsed tree at the given stage'), _('NAME')),
1930 1930 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1931 1931 ('', 'verify-optimized', False, _('verify optimized result')),
1932 1932 ],
1933 1933 ('REVSPEC'))
1934 1934 def debugrevspec(ui, repo, expr, **opts):
1935 1935 """parse and apply a revision specification
1936 1936
1937 1937 Use the -p/--show-stage option to print the parsed tree at the given stages.
1938 1938 Use -p all to print the tree at every stage.
1939 1939
1940 1940 Use the --no-show-revs option with -s or -p to print only the set
1941 1941 representation or the parsed tree, respectively.
1942 1942
1943 1943 Use --verify-optimized to compare the optimized result with the unoptimized
1944 1944 one. Returns 1 if the optimized result differs.
1945 1945 """
1946 1946 stages = [
1947 1947 ('parsed', lambda tree: tree),
1948 1948 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1949 1949 ('concatenated', revsetlang.foldconcat),
1950 1950 ('analyzed', revsetlang.analyze),
1951 1951 ('optimized', revsetlang.optimize),
1952 1952 ]
1953 1953 if opts['no_optimized']:
1954 1954 stages = stages[:-1]
1955 1955 if opts['verify_optimized'] and opts['no_optimized']:
1956 1956 raise error.Abort(_('cannot use --verify-optimized with '
1957 1957 '--no-optimized'))
1958 1958 stagenames = set(n for n, f in stages)
1959 1959
1960 1960 showalways = set()
1961 1961 showchanged = set()
1962 1962 if ui.verbose and not opts['show_stage']:
1963 1963 # show parsed tree by --verbose (deprecated)
1964 1964 showalways.add('parsed')
1965 1965 showchanged.update(['expanded', 'concatenated'])
1966 1966 if opts['optimize']:
1967 1967 showalways.add('optimized')
1968 1968 if opts['show_stage'] and opts['optimize']:
1969 1969 raise error.Abort(_('cannot use --optimize with --show-stage'))
1970 1970 if opts['show_stage'] == ['all']:
1971 1971 showalways.update(stagenames)
1972 1972 else:
1973 1973 for n in opts['show_stage']:
1974 1974 if n not in stagenames:
1975 1975 raise error.Abort(_('invalid stage name: %s') % n)
1976 1976 showalways.update(opts['show_stage'])
1977 1977
1978 1978 treebystage = {}
1979 1979 printedtree = None
1980 1980 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1981 1981 for n, f in stages:
1982 1982 treebystage[n] = tree = f(tree)
1983 1983 if n in showalways or (n in showchanged and tree != printedtree):
1984 1984 if opts['show_stage'] or n != 'parsed':
1985 1985 ui.write(("* %s:\n") % n)
1986 1986 ui.write(revsetlang.prettyformat(tree), "\n")
1987 1987 printedtree = tree
1988 1988
1989 1989 if opts['verify_optimized']:
1990 1990 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1991 1991 brevs = revset.makematcher(treebystage['optimized'])(repo)
1992 1992 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1993 1993 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1994 1994 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1995 1995 arevs = list(arevs)
1996 1996 brevs = list(brevs)
1997 1997 if arevs == brevs:
1998 1998 return 0
1999 1999 ui.write(('--- analyzed\n'), label='diff.file_a')
2000 2000 ui.write(('+++ optimized\n'), label='diff.file_b')
2001 2001 sm = difflib.SequenceMatcher(None, arevs, brevs)
2002 2002 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2003 2003 if tag in ('delete', 'replace'):
2004 2004 for c in arevs[alo:ahi]:
2005 2005 ui.write('-%s\n' % c, label='diff.deleted')
2006 2006 if tag in ('insert', 'replace'):
2007 2007 for c in brevs[blo:bhi]:
2008 2008 ui.write('+%s\n' % c, label='diff.inserted')
2009 2009 if tag == 'equal':
2010 2010 for c in arevs[alo:ahi]:
2011 2011 ui.write(' %s\n' % c)
2012 2012 return 1
2013 2013
2014 2014 func = revset.makematcher(tree)
2015 2015 revs = func(repo)
2016 2016 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2017 2017 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2018 2018 if not opts['show_revs']:
2019 2019 return
2020 2020 for c in revs:
2021 2021 ui.write("%s\n" % c)
2022 2022
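Some illustrative invocations (the revsets are arbitrary examples):

  $ hg debugrevspec -p all 'heads(all())'
  $ hg debugrevspec --verify-optimized '::tip'
  $ hg debugrevspec --no-show-revs -s tip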
2023 2023 @command('debugsetparents', [], _('REV1 [REV2]'))
2024 2024 def debugsetparents(ui, repo, rev1, rev2=None):
2025 2025 """manually set the parents of the current working directory
2026 2026
2027 2027 This is useful for writing repository conversion tools, but should
2028 2028 be used with care. For example, neither the working directory nor the
2029 2029 dirstate is updated, so file status may be incorrect after running this
2030 2030 command.
2031 2031
2032 2032 Returns 0 on success.
2033 2033 """
2034 2034
2035 2035 r1 = scmutil.revsingle(repo, rev1).node()
2036 2036 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2037 2037
2038 2038 with repo.wlock():
2039 2039 repo.setparents(r1, r2)
2040 2040
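For example, a conversion tool might pin the working directory onto specific parents (revision numbers are illustrative):

  $ hg debugsetparents 3 7
  $ hg debugsetparents null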
2041 2041 @command('debugsub',
2042 2042 [('r', 'rev', '',
2043 2043 _('revision to check'), _('REV'))],
2044 2044 _('[-r REV] [REV]'))
2045 2045 def debugsub(ui, repo, rev=None):
2046 2046 ctx = scmutil.revsingle(repo, rev, None)
2047 2047 for k, v in sorted(ctx.substate.items()):
2048 2048 ui.write(('path %s\n') % k)
2049 2049 ui.write((' source %s\n') % v[0])
2050 2050 ui.write((' revision %s\n') % v[1])
2051 2051
2052 2052 @command('debugsuccessorssets',
2053 2053 [],
2054 2054 _('[REV]'))
2055 2055 def debugsuccessorssets(ui, repo, *revs):
2056 2056 """show set of successors for revision
2057 2057
2058 2058 A successors set of changeset A is a consistent group of revisions that
2059 2059 succeed A. It contains non-obsolete changesets only.
2060 2060
2061 2061 In most cases a changeset A has a single successors set containing a single
2062 2062 successor (changeset A replaced by A').
2063 2063
2064 2064 A changeset that is made obsolete with no successors is called "pruned".
2065 2065 Such changesets have no successors sets at all.
2066 2066
2067 2067 A changeset that has been "split" will have a successors set containing
2068 2068 more than one successor.
2069 2069
2070 2070 A changeset that has been rewritten in multiple different ways is called
2071 2071 "divergent". Such changesets have multiple successor sets (each of which
2072 2072 may also be split, i.e. have multiple successors).
2073 2073
2074 2074 Results are displayed as follows::
2075 2075
2076 2076 <rev1>
2077 2077 <successors-1A>
2078 2078 <rev2>
2079 2079 <successors-2A>
2080 2080 <successors-2B1> <successors-2B2> <successors-2B3>
2081 2081
2082 2082 Here rev2 has two possible (i.e. divergent) successors sets. The first
2083 2083 holds one element, whereas the second holds three (i.e. the changeset has
2084 2084 been split).
2085 2085 """
2086 2086 # passed to successorssets caching computation from one call to another
2087 2087 cache = {}
2088 2088 ctx2str = str
2089 2089 node2str = short
2090 2090 if ui.debug():
2091 2091 def ctx2str(ctx):
2092 2092 return ctx.hex()
2093 2093 node2str = hex
2094 2094 for rev in scmutil.revrange(repo, revs):
2095 2095 ctx = repo[rev]
2096 2096 ui.write('%s\n'% ctx2str(ctx))
2097 2097 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2098 2098 if succsset:
2099 2099 ui.write(' ')
2100 2100 ui.write(node2str(succsset[0]))
2101 2101 for node in succsset[1:]:
2102 2102 ui.write(' ')
2103 2103 ui.write(node2str(node))
2104 2104 ui.write('\n')
2105 2105
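A usage sketch; the command accepts revsets, so all obsolete changesets can be inspected at once (output depends on the repository's obsolescence markers and is omitted here):

  $ hg debugsuccessorssets 'obsolete()'
  $ hg debugsuccessorssets --debug 'obsolete()'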
2106 2106 @command('debugtemplate',
2107 2107 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2108 2108 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2109 2109 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2110 2110 optionalrepo=True)
2111 2111 def debugtemplate(ui, repo, tmpl, **opts):
2112 2112 """parse and apply a template
2113 2113
2114 2114 If -r/--rev is given, the template is processed as a log template and
2115 2115 applied to the given changesets. Otherwise, it is processed as a generic
2116 2116 template.
2117 2117
2118 2118 Use --verbose to print the parsed tree.
2119 2119 """
2120 2120 revs = None
2121 2121 if opts['rev']:
2122 2122 if repo is None:
2123 2123 raise error.RepoError(_('there is no Mercurial repository here '
2124 2124 '(.hg not found)'))
2125 2125 revs = scmutil.revrange(repo, opts['rev'])
2126 2126
2127 2127 props = {}
2128 2128 for d in opts['define']:
2129 2129 try:
2130 2130 k, v = (e.strip() for e in d.split('=', 1))
2131 2131 if not k or k == 'ui':
2132 2132 raise ValueError
2133 2133 props[k] = v
2134 2134 except ValueError:
2135 2135 raise error.Abort(_('malformed keyword definition: %s') % d)
2136 2136
2137 2137 if ui.verbose:
2138 2138 aliases = ui.configitems('templatealias')
2139 2139 tree = templater.parse(tmpl)
2140 2140 ui.note(templater.prettyformat(tree), '\n')
2141 2141 newtree = templater.expandaliases(tree, aliases)
2142 2142 if newtree != tree:
2143 2143 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2144 2144
2145 2145 if revs is None:
2146 2146 t = formatter.maketemplater(ui, tmpl)
2147 2147 props['ui'] = ui
2148 2148 ui.write(t.render(props))
2149 2149 else:
2150 2150 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2151 2151 for r in revs:
2152 2152 displayer.show(repo[r], **props)
2153 2153 displayer.close()
2154 2154
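For illustration (the ``word`` keyword is invented): without -r the template is rendered generically using the -D definitions, and with -r it is applied to each changeset as a log template.

  $ hg debugtemplate -D word=hello '{word}\n'
  hello
  $ hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'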
2155 2155 @command('debugupdatecaches', [])
2156 2156 def debugupdatecaches(ui, repo, *pats, **opts):
2157 2157 """warm all known caches in the repository"""
2158 2158 with repo.wlock():
2159 2159 with repo.lock():
2160 2160 repo.updatecaches()
2161 2161
2162 2162 @command('debugupgraderepo', [
2163 2163 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2164 2164 ('', 'run', False, _('performs an upgrade')),
2165 2165 ])
2166 2166 def debugupgraderepo(ui, repo, run=False, optimize=None):
2167 2167 """upgrade a repository to use different features
2168 2168
2169 2169 If no arguments are specified, the repository is evaluated for upgrade
2170 2170 and a list of problems and potential optimizations is printed.
2171 2171
2172 2172 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2173 2173 can be influenced via additional arguments. More details will be provided
2174 2174 by the command output when run without ``--run``.
2175 2175
2176 2176 During the upgrade, the repository will be locked and no writes will be
2177 2177 allowed.
2178 2178
2179 2179 At the end of the upgrade, the repository may not be readable while new
2180 2180 repository data is swapped in. This window will be as long as it takes to
2181 2181 rename some directories inside the ``.hg`` directory. On most machines, this
2182 2182 should complete almost instantaneously and the chances of a consumer being
2183 2183 unable to access the repository should be low.
2184 2184 """
2185 2185 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2186 2186
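A usage sketch: run once without --run to review the detected deficiencies and available optimizations, then re-run with --run to actually perform the upgrade.

  $ hg debugupgraderepo
  $ hg debugupgraderepo --run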
2187 2187 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2188 2188 inferrepo=True)
2189 2189 def debugwalk(ui, repo, *pats, **opts):
2190 2190 """show how files match on given patterns"""
2191 2191 m = scmutil.match(repo[None], pats, opts)
2192 2192 ui.write(('matcher: %r\n' % m))
2193 2193 items = list(repo[None].walk(m))
2194 2194 if not items:
2195 2195 return
2196 2196 f = lambda fn: fn
2197 2197 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2198 2198 f = lambda fn: util.normpath(fn)
2199 2199 fmt = 'f %%-%ds %%-%ds %%s' % (
2200 2200 max([len(abs) for abs in items]),
2201 2201 max([len(m.rel(abs)) for abs in items]))
2202 2202 for abs in items:
2203 2203 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2204 2204 ui.write("%s\n" % line.rstrip())
2205 2205
2206 2206 @command('debugwireargs',
2207 2207 [('', 'three', '', 'three'),
2208 2208 ('', 'four', '', 'four'),
2209 2209 ('', 'five', '', 'five'),
2210 2210 ] + cmdutil.remoteopts,
2211 2211 _('REPO [OPTIONS]... [ONE [TWO]]'),
2212 2212 norepo=True)
2213 2213 def debugwireargs(ui, repopath, *vals, **opts):
2214 2214 repo = hg.peer(ui, opts, repopath)
2215 2215 for opt in cmdutil.remoteopts:
2216 2216 del opts[opt[1]]
2217 2217 args = {}
2218 2218 for k, v in opts.iteritems():
2219 2219 if v:
2220 2220 args[k] = v
2221 2221 # run twice to check that we don't mess up the stream for the next command
2222 2222 res1 = repo.debugwireargs(*vals, **args)
2223 2223 res2 = repo.debugwireargs(*vals, **args)
2224 2224 ui.write("%s\n" % res1)
2225 2225 if res1 != res2:
2226 2226 ui.warn("%s\n" % res2)
@@ -1,156 +1,156 b''
1 1 $ cat << EOF >> $HGRCPATH
2 2 > [format]
3 3 > usegeneraldelta=yes
4 4 > EOF
5 5
6 6 $ hg init debugrevlog
7 7 $ cd debugrevlog
8 8 $ echo a > a
9 9 $ hg ci -Am adda
10 10 adding a
11 11 $ hg debugrevlog -m
12 12 format : 1
13 13 flags : inline, generaldelta
14 14
15 15 revisions : 1
16 16 merges : 0 ( 0.00%)
17 17 normal : 1 (100.00%)
18 18 revisions : 1
19 19 full : 1 (100.00%)
20 20 deltas : 0 ( 0.00%)
21 21 revision size : 44
22 22 full : 44 (100.00%)
23 23 deltas : 0 ( 0.00%)
24 24
25 25 chunks : 1
26 26 0x75 (u) : 1 (100.00%)
27 27 chunks size : 44
28 28 0x75 (u) : 44 (100.00%)
29 29
30 avg chain length : 0
31 max chain length : 0
32 max chain reach : 44
33 compression ratio : 0
30 avg chain length : 0
31 max chain length : 0
32 max chain reach : 44
33 compression ratio : 0
34 34
35 35 uncompressed data size (min/max/avg) : 43 / 43 / 43
36 36 full revision size (min/max/avg) : 44 / 44 / 44
37 37 delta size (min/max/avg) : 0 / 0 / 0
38 38
39 39 Test debugindex, with and without the --debug flag
40 40 $ hg debugindex a
41 41 rev offset length ..... linkrev nodeid p1 p2 (re)
42 42 0 0 3 .... 0 b789fdd96dc2 000000000000 000000000000 (re)
43 43 $ hg --debug debugindex a
44 44 rev offset length ..... linkrev nodeid p1 p2 (re)
45 45 0 0 3 .... 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re)
46 46 $ hg debugindex -f 1 a
47 47 rev flag offset length size ..... link p1 p2 nodeid (re)
48 48 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2 (re)
49 49 $ hg --debug debugindex -f 1 a
50 50 rev flag offset length size ..... link p1 p2 nodeid (re)
51 51 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re)
52 52
53 53 debugdelta chain basic output
54 54
55 55 $ hg debugdeltachain -m
56 56 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
57 57 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
58 58
59 59 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
60 60 0 1 1
61 61
62 62 $ hg debugdeltachain -m -Tjson
63 63 [
64 64 {
65 65 "chainid": 1,
66 66 "chainlen": 1,
67 67 "chainratio": 1.02325581395,
68 68 "chainsize": 44,
69 69 "compsize": 44,
70 70 "deltatype": "base",
71 71 "extradist": 0,
72 72 "extraratio": 0.0,
73 73 "lindist": 44,
74 74 "prevrev": -1,
75 75 "rev": 0,
76 76 "uncompsize": 43
77 77 }
78 78 ]
79 79
80 80 Test max chain len
81 81 $ cat >> $HGRCPATH << EOF
82 82 > [format]
83 83 > maxchainlen=4
84 84 > EOF
85 85
86 86 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
87 87 $ hg ci -m a
88 88 $ printf "b\n" >> a
89 89 $ hg ci -m a
90 90 $ printf "c\n" >> a
91 91 $ hg ci -m a
92 92 $ printf "d\n" >> a
93 93 $ hg ci -m a
94 94 $ printf "e\n" >> a
95 95 $ hg ci -m a
96 96 $ printf "f\n" >> a
97 97 $ hg ci -m a
98 98 $ printf 'g\n' >> a
99 99 $ hg ci -m a
100 100 $ printf 'h\n' >> a
101 101 $ hg ci -m a
102 102 $ hg debugrevlog -d a
103 103 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
104 104 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
105 105 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
106 106 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
107 107 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
108 108 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
109 109 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
110 110 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
111 111 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
112 112 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
113 113
114 114 Test WdirUnsupported exception
115 115
116 116 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
117 117 abort: working directory revision cannot be specified
118 118 [255]
119 119
120 120 Test cache warming command
121 121
122 122 $ rm -rf .hg/cache/
123 123 $ hg debugupdatecaches --debug
124 124 updating the branch cache
125 125 $ ls -r .hg/cache/*
126 126 .hg/cache/rbc-revs-v1
127 127 .hg/cache/rbc-names-v1
128 128 .hg/cache/branch2-served
129 129
130 130 $ cd ..
131 131
132 132 Test internal debugstacktrace command
133 133
134 134 $ cat > debugstacktrace.py << EOF
135 135 > from mercurial.util import debugstacktrace, dst, sys
136 136 > def f():
137 137 > debugstacktrace(f=sys.stdout)
138 138 > g()
139 139 > def g():
140 140 > dst('hello from g\\n', skip=1)
141 141 > h()
142 142 > def h():
143 143 > dst('hi ...\\nfrom h hidden in g', 1, depth=2)
144 144 > f()
145 145 > EOF
146 146 $ $PYTHON debugstacktrace.py
147 147 stacktrace at:
148 148 debugstacktrace.py:10 in * (glob)
149 149 debugstacktrace.py:3 in f
150 150 hello from g at:
151 151 debugstacktrace.py:10 in * (glob)
152 152 debugstacktrace.py:4 in f
153 153 hi ...
154 154 from h hidden in g at:
155 155 debugstacktrace.py:4 in f
156 156 debugstacktrace.py:7 in g