debugfsinfo: improve case-sensitive testing...
Jun Wu
r31634:35738db2 default
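
The hunk below replaces the old probe, which wrote a fixed '.debugfsinfo' file into the current working directory, with a tempfile.NamedTemporaryFile created inside the tested path, so the check runs against PATH, cleans up after itself, and falls back to reporting '(unknown)' if the directory is not writable. A minimal standalone sketch of the same idea, assuming only the standard library (the fs_is_case_sensitive helper and its swapcase comparison are illustrative stand-ins, not Mercurial's util.fscasesensitive):

    import os
    import tempfile

    def fs_is_case_sensitive(path="."):
        """Probe whether the filesystem holding `path` treats names case-sensitively."""
        try:
            # Like the new debugfsinfo code: use a throwaway temporary file in
            # the tested directory instead of writing a fixed '.debugfsinfo'
            # file into the current working directory.
            with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
                directory, name = os.path.split(f.name)
                swapped = os.path.join(directory, name.swapcase())
                if swapped == f.name:
                    return None  # no letters to swap; cannot tell
                # On a case-insensitive filesystem the case-swapped spelling
                # resolves to the same existing file; on a case-sensitive one
                # it should not exist.
                return not os.path.exists(swapped)
        except OSError:
            # Directory unreadable or not writable: report "unknown", as the
            # new hunk does.
            return None

    if __name__ == '__main__':
        result = fs_is_case_sensitive('.')
        print('case-sensitive: %s' % {True: 'yes', False: 'no', None: '(unknown)'}[result])
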
@@ -1,2112 +1,2115 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 commands,
36 36 context,
37 37 dagparser,
38 38 dagutil,
39 39 encoding,
40 40 error,
41 41 exchange,
42 42 extensions,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 policy,
51 51 pvec,
52 52 pycompat,
53 53 repair,
54 54 revlog,
55 55 revset,
56 56 revsetlang,
57 57 scmutil,
58 58 setdiscovery,
59 59 simplemerge,
60 60 smartset,
61 61 sslutil,
62 62 streamclone,
63 63 templater,
64 64 treediscovery,
65 65 util,
66 66 vfs as vfsmod,
67 67 )
68 68
69 69 release = lockmod.release
70 70
71 71 # We reuse the command table from commands because it is easier than
72 72 # teaching dispatch about multiple tables.
73 73 command = cmdutil.command(commands.table)
74 74
75 75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 76 def debugancestor(ui, repo, *args):
77 77 """find the ancestor revision of two revisions in a given index"""
78 78 if len(args) == 3:
79 79 index, rev1, rev2 = args
80 80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 81 lookup = r.lookup
82 82 elif len(args) == 2:
83 83 if not repo:
84 84 raise error.Abort(_('there is no Mercurial repository here '
85 85 '(.hg not found)'))
86 86 rev1, rev2 = args
87 87 r = repo.changelog
88 88 lookup = repo.lookup
89 89 else:
90 90 raise error.Abort(_('either two or three arguments required'))
91 91 a = r.ancestor(lookup(rev1), lookup(rev2))
92 92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93 93
94 94 @command('debugapplystreamclonebundle', [], 'FILE')
95 95 def debugapplystreamclonebundle(ui, repo, fname):
96 96 """apply a stream clone bundle file"""
97 97 f = hg.openpath(ui, fname)
98 98 gen = exchange.readbundle(ui, f, fname)
99 99 gen.apply(repo)
100 100
101 101 @command('debugbuilddag',
102 102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 104 ('n', 'new-file', None, _('add new file at each rev'))],
105 105 _('[OPTION]... [TEXT]'))
106 106 def debugbuilddag(ui, repo, text=None,
107 107 mergeable_file=False,
108 108 overwritten_file=False,
109 109 new_file=False):
110 110 """builds a repo with a given DAG from scratch in the current empty repo
111 111
112 112 The description of the DAG is read from stdin if not given on the
113 113 command line.
114 114
115 115 Elements:
116 116
117 117 - "+n" is a linear run of n nodes based on the current default parent
118 118 - "." is a single node based on the current default parent
119 119 - "$" resets the default parent to null (implied at the start);
120 120 otherwise the default parent is always the last node created
121 121 - "<p" sets the default parent to the backref p
122 122 - "*p" is a fork at parent p, which is a backref
123 123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 124 - "/p2" is a merge of the preceding node and p2
125 125 - ":tag" defines a local tag for the preceding node
126 126 - "@branch" sets the named branch for subsequent nodes
127 127 - "#...\\n" is a comment up to the end of the line
128 128
129 129 Whitespace between the above elements is ignored.
130 130
131 131 A backref is either
132 132
133 133 - a number n, which references the node curr-n, where curr is the current
134 134 node, or
135 135 - the name of a local tag you placed earlier using ":tag", or
136 136 - empty to denote the default parent.
137 137
138 138 All string valued-elements are either strictly alphanumeric, or must
139 139 be enclosed in double quotes ("..."), with "\\" as escape character.
140 140 """
141 141
142 142 if text is None:
143 143 ui.status(_("reading DAG from stdin\n"))
144 144 text = ui.fin.read()
145 145
146 146 cl = repo.changelog
147 147 if len(cl) > 0:
148 148 raise error.Abort(_('repository is not empty'))
149 149
150 150 # determine number of revs in DAG
151 151 total = 0
152 152 for type, data in dagparser.parsedag(text):
153 153 if type == 'n':
154 154 total += 1
155 155
156 156 if mergeable_file:
157 157 linesperrev = 2
158 158 # make a file with k lines per rev
159 159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 160 initialmergedlines.append("")
161 161
162 162 tags = []
163 163
164 164 wlock = lock = tr = None
165 165 try:
166 166 wlock = repo.wlock()
167 167 lock = repo.lock()
168 168 tr = repo.transaction("builddag")
169 169
170 170 at = -1
171 171 atbranch = 'default'
172 172 nodeids = []
173 173 id = 0
174 174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 175 for type, data in dagparser.parsedag(text):
176 176 if type == 'n':
177 177 ui.note(('node %s\n' % str(data)))
178 178 id, ps = data
179 179
180 180 files = []
181 181 fctxs = {}
182 182
183 183 p2 = None
184 184 if mergeable_file:
185 185 fn = "mf"
186 186 p1 = repo[ps[0]]
187 187 if len(ps) > 1:
188 188 p2 = repo[ps[1]]
189 189 pa = p1.ancestor(p2)
190 190 base, local, other = [x[fn].data() for x in (pa, p1,
191 191 p2)]
192 192 m3 = simplemerge.Merge3Text(base, local, other)
193 193 ml = [l.strip() for l in m3.merge_lines()]
194 194 ml.append("")
195 195 elif at > 0:
196 196 ml = p1[fn].data().split("\n")
197 197 else:
198 198 ml = initialmergedlines
199 199 ml[id * linesperrev] += " r%i" % id
200 200 mergedtext = "\n".join(ml)
201 201 files.append(fn)
202 202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203 203
204 204 if overwritten_file:
205 205 fn = "of"
206 206 files.append(fn)
207 207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208 208
209 209 if new_file:
210 210 fn = "nf%i" % id
211 211 files.append(fn)
212 212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 213 if len(ps) > 1:
214 214 if not p2:
215 215 p2 = repo[ps[1]]
216 216 for fn in p2:
217 217 if fn.startswith("nf"):
218 218 files.append(fn)
219 219 fctxs[fn] = p2[fn]
220 220
221 221 def fctxfn(repo, cx, path):
222 222 return fctxs.get(path)
223 223
224 224 if len(ps) == 0 or ps[0] < 0:
225 225 pars = [None, None]
226 226 elif len(ps) == 1:
227 227 pars = [nodeids[ps[0]], None]
228 228 else:
229 229 pars = [nodeids[p] for p in ps]
230 230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 231 date=(id, 0),
232 232 user="debugbuilddag",
233 233 extra={'branch': atbranch})
234 234 nodeid = repo.commitctx(cx)
235 235 nodeids.append(nodeid)
236 236 at = id
237 237 elif type == 'l':
238 238 id, name = data
239 239 ui.note(('tag %s\n' % name))
240 240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 241 elif type == 'a':
242 242 ui.note(('branch %s\n' % data))
243 243 atbranch = data
244 244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 245 tr.close()
246 246
247 247 if tags:
248 248 repo.vfs.write("localtags", "".join(tags))
249 249 finally:
250 250 ui.progress(_('building'), None)
251 251 release(tr, lock, wlock)
252 252
253 253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 254 indent_string = ' ' * indent
255 255 if all:
256 256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 257 % indent_string)
258 258
259 259 def showchunks(named):
260 260 ui.write("\n%s%s\n" % (indent_string, named))
261 261 chain = None
262 262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 263 node = chunkdata['node']
264 264 p1 = chunkdata['p1']
265 265 p2 = chunkdata['p2']
266 266 cs = chunkdata['cs']
267 267 deltabase = chunkdata['deltabase']
268 268 delta = chunkdata['delta']
269 269 ui.write("%s%s %s %s %s %s %s\n" %
270 270 (indent_string, hex(node), hex(p1), hex(p2),
271 271 hex(cs), hex(deltabase), len(delta)))
272 272 chain = node
273 273
274 274 chunkdata = gen.changelogheader()
275 275 showchunks("changelog")
276 276 chunkdata = gen.manifestheader()
277 277 showchunks("manifest")
278 278 for chunkdata in iter(gen.filelogheader, {}):
279 279 fname = chunkdata['filename']
280 280 showchunks(fname)
281 281 else:
282 282 if isinstance(gen, bundle2.unbundle20):
283 283 raise error.Abort(_('use debugbundle2 for this file'))
284 284 chunkdata = gen.changelogheader()
285 285 chain = None
286 286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 287 node = chunkdata['node']
288 288 ui.write("%s%s\n" % (indent_string, hex(node)))
289 289 chain = node
290 290
291 291 def _debugbundle2(ui, gen, all=None, **opts):
292 292 """lists the contents of a bundle2"""
293 293 if not isinstance(gen, bundle2.unbundle20):
294 294 raise error.Abort(_('not a bundle2 file'))
295 295 ui.write(('Stream params: %s\n' % repr(gen.params)))
296 296 for part in gen.iterparts():
297 297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
298 298 if part.type == 'changegroup':
299 299 version = part.params.get('version', '01')
300 300 cg = changegroup.getunbundler(version, part, 'UN')
301 301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
302 302
303 303 @command('debugbundle',
304 304 [('a', 'all', None, _('show all details')),
305 305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
306 306 _('FILE'),
307 307 norepo=True)
308 308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
309 309 """lists the contents of a bundle"""
310 310 with hg.openpath(ui, bundlepath) as f:
311 311 if spec:
312 312 spec = exchange.getbundlespec(ui, f)
313 313 ui.write('%s\n' % spec)
314 314 return
315 315
316 316 gen = exchange.readbundle(ui, f, bundlepath)
317 317 if isinstance(gen, bundle2.unbundle20):
318 318 return _debugbundle2(ui, gen, all=all, **opts)
319 319 _debugchangegroup(ui, gen, all=all, **opts)
320 320
321 321 @command('debugcheckstate', [], '')
322 322 def debugcheckstate(ui, repo):
323 323 """validate the correctness of the current dirstate"""
324 324 parent1, parent2 = repo.dirstate.parents()
325 325 m1 = repo[parent1].manifest()
326 326 m2 = repo[parent2].manifest()
327 327 errors = 0
328 328 for f in repo.dirstate:
329 329 state = repo.dirstate[f]
330 330 if state in "nr" and f not in m1:
331 331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
332 332 errors += 1
333 333 if state in "a" and f in m1:
334 334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
335 335 errors += 1
336 336 if state in "m" and f not in m1 and f not in m2:
337 337 ui.warn(_("%s in state %s, but not in either manifest\n") %
338 338 (f, state))
339 339 errors += 1
340 340 for f in m1:
341 341 state = repo.dirstate[f]
342 342 if state not in "nrm":
343 343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
344 344 errors += 1
345 345 if errors:
346 346 error = _(".hg/dirstate inconsistent with current parent's manifest")
347 347 raise error.Abort(error)
348 348
349 349 @command('debugcolor',
350 350 [('', 'style', None, _('show all configured styles'))],
351 351 'hg debugcolor')
352 352 def debugcolor(ui, repo, **opts):
353 353 """show available color, effects or style"""
354 354 ui.write(('color mode: %s\n') % ui._colormode)
355 355 if opts.get('style'):
356 356 return _debugdisplaystyle(ui)
357 357 else:
358 358 return _debugdisplaycolor(ui)
359 359
360 360 def _debugdisplaycolor(ui):
361 361 ui = ui.copy()
362 362 ui._styles.clear()
363 363 for effect in color._effects.keys():
364 364 ui._styles[effect] = effect
365 365 if ui._terminfoparams:
366 366 for k, v in ui.configitems('color'):
367 367 if k.startswith('color.'):
368 368 ui._styles[k] = k[6:]
369 369 elif k.startswith('terminfo.'):
370 370 ui._styles[k] = k[9:]
371 371 ui.write(_('available colors:\n'))
372 372 # sort label with a '_' after the other to group '_background' entry.
373 373 items = sorted(ui._styles.items(),
374 374 key=lambda i: ('_' in i[0], i[0], i[1]))
375 375 for colorname, label in items:
376 376 ui.write(('%s\n') % colorname, label=label)
377 377
378 378 def _debugdisplaystyle(ui):
379 379 ui.write(_('available style:\n'))
380 380 width = max(len(s) for s in ui._styles)
381 381 for label, effects in sorted(ui._styles.items()):
382 382 ui.write('%s' % label, label=label)
383 383 if effects:
384 384 # 50
385 385 ui.write(': ')
386 386 ui.write(' ' * (max(0, width - len(label))))
387 387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
388 388 ui.write('\n')
389 389
390 390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
391 391 def debugcommands(ui, cmd='', *args):
392 392 """list all available commands and options"""
393 393 for cmd, vals in sorted(commands.table.iteritems()):
394 394 cmd = cmd.split('|')[0].strip('^')
395 395 opts = ', '.join([i[1] for i in vals[1]])
396 396 ui.write('%s: %s\n' % (cmd, opts))
397 397
398 398 @command('debugcomplete',
399 399 [('o', 'options', None, _('show the command options'))],
400 400 _('[-o] CMD'),
401 401 norepo=True)
402 402 def debugcomplete(ui, cmd='', **opts):
403 403 """returns the completion list associated with the given command"""
404 404
405 405 if opts.get('options'):
406 406 options = []
407 407 otables = [commands.globalopts]
408 408 if cmd:
409 409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
410 410 otables.append(entry[1])
411 411 for t in otables:
412 412 for o in t:
413 413 if "(DEPRECATED)" in o[3]:
414 414 continue
415 415 if o[0]:
416 416 options.append('-%s' % o[0])
417 417 options.append('--%s' % o[1])
418 418 ui.write("%s\n" % "\n".join(options))
419 419 return
420 420
421 421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
422 422 if ui.verbose:
423 423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
424 424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
425 425
426 426 @command('debugcreatestreamclonebundle', [], 'FILE')
427 427 def debugcreatestreamclonebundle(ui, repo, fname):
428 428 """create a stream clone bundle file
429 429
430 430 Stream bundles are special bundles that are essentially archives of
431 431 revlog files. They are commonly used for cloning very quickly.
432 432 """
433 433 requirements, gen = streamclone.generatebundlev1(repo)
434 434 changegroup.writechunks(ui, gen, fname)
435 435
436 436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
437 437
438 438 @command('debugdag',
439 439 [('t', 'tags', None, _('use tags as labels')),
440 440 ('b', 'branches', None, _('annotate with branch names')),
441 441 ('', 'dots', None, _('use dots for runs')),
442 442 ('s', 'spaces', None, _('separate elements by spaces'))],
443 443 _('[OPTION]... [FILE [REV]...]'),
444 444 optionalrepo=True)
445 445 def debugdag(ui, repo, file_=None, *revs, **opts):
446 446 """format the changelog or an index DAG as a concise textual description
447 447
448 448 If you pass a revlog index, the revlog's DAG is emitted. If you list
449 449 revision numbers, they get labeled in the output as rN.
450 450
451 451 Otherwise, the changelog DAG of the current repo is emitted.
452 452 """
453 453 spaces = opts.get('spaces')
454 454 dots = opts.get('dots')
455 455 if file_:
456 456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
457 457 file_)
458 458 revs = set((int(r) for r in revs))
459 459 def events():
460 460 for r in rlog:
461 461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
462 462 if p != -1))
463 463 if r in revs:
464 464 yield 'l', (r, "r%i" % r)
465 465 elif repo:
466 466 cl = repo.changelog
467 467 tags = opts.get('tags')
468 468 branches = opts.get('branches')
469 469 if tags:
470 470 labels = {}
471 471 for l, n in repo.tags().items():
472 472 labels.setdefault(cl.rev(n), []).append(l)
473 473 def events():
474 474 b = "default"
475 475 for r in cl:
476 476 if branches:
477 477 newb = cl.read(cl.node(r))[5]['branch']
478 478 if newb != b:
479 479 yield 'a', newb
480 480 b = newb
481 481 yield 'n', (r, list(p for p in cl.parentrevs(r)
482 482 if p != -1))
483 483 if tags:
484 484 ls = labels.get(r)
485 485 if ls:
486 486 for l in ls:
487 487 yield 'l', (r, l)
488 488 else:
489 489 raise error.Abort(_('need repo for changelog dag'))
490 490
491 491 for line in dagparser.dagtextlines(events(),
492 492 addspaces=spaces,
493 493 wraplabels=True,
494 494 wrapannotations=True,
495 495 wrapnonlinear=dots,
496 496 usedots=dots,
497 497 maxlinewidth=70):
498 498 ui.write(line)
499 499 ui.write("\n")
500 500
501 501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
502 502 def debugdata(ui, repo, file_, rev=None, **opts):
503 503 """dump the contents of a data file revision"""
504 504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
505 505 if rev is not None:
506 506 raise error.CommandError('debugdata', _('invalid arguments'))
507 507 file_, rev = None, file_
508 508 elif rev is None:
509 509 raise error.CommandError('debugdata', _('invalid arguments'))
510 510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
511 511 try:
512 512 ui.write(r.revision(r.lookup(rev), raw=True))
513 513 except KeyError:
514 514 raise error.Abort(_('invalid revision identifier %s') % rev)
515 515
516 516 @command('debugdate',
517 517 [('e', 'extended', None, _('try extended date formats'))],
518 518 _('[-e] DATE [RANGE]'),
519 519 norepo=True, optionalrepo=True)
520 520 def debugdate(ui, date, range=None, **opts):
521 521 """parse and display a date"""
522 522 if opts["extended"]:
523 523 d = util.parsedate(date, util.extendeddateformats)
524 524 else:
525 525 d = util.parsedate(date)
526 526 ui.write(("internal: %s %s\n") % d)
527 527 ui.write(("standard: %s\n") % util.datestr(d))
528 528 if range:
529 529 m = util.matchdate(range)
530 530 ui.write(("match: %s\n") % m(d[0]))
531 531
532 532 @command('debugdeltachain',
533 533 commands.debugrevlogopts + commands.formatteropts,
534 534 _('-c|-m|FILE'),
535 535 optionalrepo=True)
536 536 def debugdeltachain(ui, repo, file_=None, **opts):
537 537 """dump information about delta chains in a revlog
538 538
539 539 Output can be templatized. Available template keywords are:
540 540
541 541 :``rev``: revision number
542 542 :``chainid``: delta chain identifier (numbered by unique base)
543 543 :``chainlen``: delta chain length to this revision
544 544 :``prevrev``: previous revision in delta chain
545 545 :``deltatype``: role of delta / how it was computed
546 546 :``compsize``: compressed size of revision
547 547 :``uncompsize``: uncompressed size of revision
548 548 :``chainsize``: total size of compressed revisions in chain
549 549 :``chainratio``: total chain size divided by uncompressed revision size
550 550 (new delta chains typically start at ratio 2.00)
551 551 :``lindist``: linear distance from base revision in delta chain to end
552 552 of this revision
553 553 :``extradist``: total size of revisions not part of this delta chain from
554 554 base of delta chain to end of this revision; a measurement
555 555 of how much extra data we need to read/seek across to read
556 556 the delta chain for this revision
557 557 :``extraratio``: extradist divided by chainsize; another representation of
558 558 how much unrelated data is needed to load this delta chain
559 559 """
560 560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
561 561 index = r.index
562 562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
563 563
564 564 def revinfo(rev):
565 565 e = index[rev]
566 566 compsize = e[1]
567 567 uncompsize = e[2]
568 568 chainsize = 0
569 569
570 570 if generaldelta:
571 571 if e[3] == e[5]:
572 572 deltatype = 'p1'
573 573 elif e[3] == e[6]:
574 574 deltatype = 'p2'
575 575 elif e[3] == rev - 1:
576 576 deltatype = 'prev'
577 577 elif e[3] == rev:
578 578 deltatype = 'base'
579 579 else:
580 580 deltatype = 'other'
581 581 else:
582 582 if e[3] == rev:
583 583 deltatype = 'base'
584 584 else:
585 585 deltatype = 'prev'
586 586
587 587 chain = r._deltachain(rev)[0]
588 588 for iterrev in chain:
589 589 e = index[iterrev]
590 590 chainsize += e[1]
591 591
592 592 return compsize, uncompsize, deltatype, chain, chainsize
593 593
594 594 fm = ui.formatter('debugdeltachain', opts)
595 595
596 596 fm.plain(' rev chain# chainlen prev delta '
597 597 'size rawsize chainsize ratio lindist extradist '
598 598 'extraratio\n')
599 599
600 600 chainbases = {}
601 601 for rev in r:
602 602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
603 603 chainbase = chain[0]
604 604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
605 605 basestart = r.start(chainbase)
606 606 revstart = r.start(rev)
607 607 lineardist = revstart + comp - basestart
608 608 extradist = lineardist - chainsize
609 609 try:
610 610 prevrev = chain[-2]
611 611 except IndexError:
612 612 prevrev = -1
613 613
614 614 chainratio = float(chainsize) / float(uncomp)
615 615 extraratio = float(extradist) / float(chainsize)
616 616
617 617 fm.startitem()
618 618 fm.write('rev chainid chainlen prevrev deltatype compsize '
619 619 'uncompsize chainsize chainratio lindist extradist '
620 620 'extraratio',
621 621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
622 622 rev, chainid, len(chain), prevrev, deltatype, comp,
623 623 uncomp, chainsize, chainratio, lineardist, extradist,
624 624 extraratio,
625 625 rev=rev, chainid=chainid, chainlen=len(chain),
626 626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
627 627 uncompsize=uncomp, chainsize=chainsize,
628 628 chainratio=chainratio, lindist=lineardist,
629 629 extradist=extradist, extraratio=extraratio)
630 630
631 631 fm.end()
632 632
633 633 @command('debugdirstate|debugstate',
634 634 [('', 'nodates', None, _('do not display the saved mtime')),
635 635 ('', 'datesort', None, _('sort by saved mtime'))],
636 636 _('[OPTION]...'))
637 637 def debugstate(ui, repo, **opts):
638 638 """show the contents of the current dirstate"""
639 639
640 640 nodates = opts.get('nodates')
641 641 datesort = opts.get('datesort')
642 642
643 643 timestr = ""
644 644 if datesort:
645 645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
646 646 else:
647 647 keyfunc = None # sort by filename
648 648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
649 649 if ent[3] == -1:
650 650 timestr = 'unset '
651 651 elif nodates:
652 652 timestr = 'set '
653 653 else:
654 654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
655 655 time.localtime(ent[3]))
656 656 if ent[1] & 0o20000:
657 657 mode = 'lnk'
658 658 else:
659 659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
660 660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
661 661 for f in repo.dirstate.copies():
662 662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
663 663
664 664 @command('debugdiscovery',
665 665 [('', 'old', None, _('use old-style discovery')),
666 666 ('', 'nonheads', None,
667 667 _('use old-style discovery with non-heads included')),
668 668 ] + commands.remoteopts,
669 669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
670 670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
671 671 """runs the changeset discovery protocol in isolation"""
672 672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
673 673 opts.get('branch'))
674 674 remote = hg.peer(repo, opts, remoteurl)
675 675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
676 676
677 677 # make sure tests are repeatable
678 678 random.seed(12323)
679 679
680 680 def doit(localheads, remoteheads, remote=remote):
681 681 if opts.get('old'):
682 682 if localheads:
683 683 raise error.Abort('cannot use localheads with old style '
684 684 'discovery')
685 685 if not util.safehasattr(remote, 'branches'):
686 686 # enable in-client legacy support
687 687 remote = localrepo.locallegacypeer(remote.local())
688 688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
689 689 force=True)
690 690 common = set(common)
691 691 if not opts.get('nonheads'):
692 692 ui.write(("unpruned common: %s\n") %
693 693 " ".join(sorted(short(n) for n in common)))
694 694 dag = dagutil.revlogdag(repo.changelog)
695 695 all = dag.ancestorset(dag.internalizeall(common))
696 696 common = dag.externalizeall(dag.headsetofconnecteds(all))
697 697 else:
698 698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
699 699 common = set(common)
700 700 rheads = set(hds)
701 701 lheads = set(repo.heads())
702 702 ui.write(("common heads: %s\n") %
703 703 " ".join(sorted(short(n) for n in common)))
704 704 if lheads <= common:
705 705 ui.write(("local is subset\n"))
706 706 elif rheads <= common:
707 707 ui.write(("remote is subset\n"))
708 708
709 709 serverlogs = opts.get('serverlog')
710 710 if serverlogs:
711 711 for filename in serverlogs:
712 712 with open(filename, 'r') as logfile:
713 713 line = logfile.readline()
714 714 while line:
715 715 parts = line.strip().split(';')
716 716 op = parts[1]
717 717 if op == 'cg':
718 718 pass
719 719 elif op == 'cgss':
720 720 doit(parts[2].split(' '), parts[3].split(' '))
721 721 elif op == 'unb':
722 722 doit(parts[3].split(' '), parts[2].split(' '))
723 723 line = logfile.readline()
724 724 else:
725 725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
726 726 opts.get('remote_head'))
727 727 localrevs = opts.get('local_head')
728 728 doit(localrevs, remoterevs)
729 729
730 730 @command('debugextensions', commands.formatteropts, [], norepo=True)
731 731 def debugextensions(ui, **opts):
732 732 '''show information about active extensions'''
733 733 exts = extensions.extensions(ui)
734 734 hgver = util.version()
735 735 fm = ui.formatter('debugextensions', opts)
736 736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
737 737 isinternal = extensions.ismoduleinternal(extmod)
738 738 extsource = pycompat.fsencode(extmod.__file__)
739 739 if isinternal:
740 740 exttestedwith = [] # never expose magic string to users
741 741 else:
742 742 exttestedwith = getattr(extmod, 'testedwith', '').split()
743 743 extbuglink = getattr(extmod, 'buglink', None)
744 744
745 745 fm.startitem()
746 746
747 747 if ui.quiet or ui.verbose:
748 748 fm.write('name', '%s\n', extname)
749 749 else:
750 750 fm.write('name', '%s', extname)
751 751 if isinternal or hgver in exttestedwith:
752 752 fm.plain('\n')
753 753 elif not exttestedwith:
754 754 fm.plain(_(' (untested!)\n'))
755 755 else:
756 756 lasttestedversion = exttestedwith[-1]
757 757 fm.plain(' (%s!)\n' % lasttestedversion)
758 758
759 759 fm.condwrite(ui.verbose and extsource, 'source',
760 760 _(' location: %s\n'), extsource or "")
761 761
762 762 if ui.verbose:
763 763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
764 764 fm.data(bundled=isinternal)
765 765
766 766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
767 767 _(' tested with: %s\n'),
768 768 fm.formatlist(exttestedwith, name='ver'))
769 769
770 770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
771 771 _(' bug reporting: %s\n'), extbuglink or "")
772 772
773 773 fm.end()
774 774
775 775 @command('debugfileset',
776 776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
777 777 _('[-r REV] FILESPEC'))
778 778 def debugfileset(ui, repo, expr, **opts):
779 779 '''parse and apply a fileset specification'''
780 780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
781 781 if ui.verbose:
782 782 tree = fileset.parse(expr)
783 783 ui.note(fileset.prettyformat(tree), "\n")
784 784
785 785 for f in ctx.getfileset(expr):
786 786 ui.write("%s\n" % f)
787 787
788 788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
789 789 def debugfsinfo(ui, path="."):
790 790 """show information detected about current filesystem"""
791 util.writefile('.debugfsinfo', '')
792 791 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
793 792 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
794 793 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
795 794 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
796 ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
797 and 'yes' or 'no'))
798 util.tryunlink('.debugfsinfo')
795 casesensitive = '(unknown)'
796 try:
797 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
798 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
799 except OSError:
800 pass
801 ui.write(('case-sensitive: %s\n') % casesensitive)
799 802
800 803 @command('debuggetbundle',
801 804 [('H', 'head', [], _('id of head node'), _('ID')),
802 805 ('C', 'common', [], _('id of common node'), _('ID')),
803 806 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
804 807 _('REPO FILE [-H|-C ID]...'),
805 808 norepo=True)
806 809 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
807 810 """retrieves a bundle from a repo
808 811
809 812 Every ID must be a full-length hex node id string. Saves the bundle to the
810 813 given file.
811 814 """
812 815 repo = hg.peer(ui, opts, repopath)
813 816 if not repo.capable('getbundle'):
814 817 raise error.Abort("getbundle() not supported by target repository")
815 818 args = {}
816 819 if common:
817 820 args['common'] = [bin(s) for s in common]
818 821 if head:
819 822 args['heads'] = [bin(s) for s in head]
820 823 # TODO: get desired bundlecaps from command line.
821 824 args['bundlecaps'] = None
822 825 bundle = repo.getbundle('debug', **args)
823 826
824 827 bundletype = opts.get('type', 'bzip2').lower()
825 828 btypes = {'none': 'HG10UN',
826 829 'bzip2': 'HG10BZ',
827 830 'gzip': 'HG10GZ',
828 831 'bundle2': 'HG20'}
829 832 bundletype = btypes.get(bundletype)
830 833 if bundletype not in bundle2.bundletypes:
831 834 raise error.Abort(_('unknown bundle type specified with --type'))
832 835 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
833 836
834 837 @command('debugignore', [], '[FILE]')
835 838 def debugignore(ui, repo, *files, **opts):
836 839 """display the combined ignore pattern and information about ignored files
837 840
838 841 With no argument display the combined ignore pattern.
839 842
840 843 Given space separated file names, shows if the given file is ignored and
841 844 if so, show the ignore rule (file and line number) that matched it.
842 845 """
843 846 ignore = repo.dirstate._ignore
844 847 if not files:
845 848 # Show all the patterns
846 849 includepat = getattr(ignore, 'includepat', None)
847 850 if includepat is not None:
848 851 ui.write("%s\n" % includepat)
849 852 else:
850 853 raise error.Abort(_("no ignore patterns found"))
851 854 else:
852 855 for f in files:
853 856 nf = util.normpath(f)
854 857 ignored = None
855 858 ignoredata = None
856 859 if nf != '.':
857 860 if ignore(nf):
858 861 ignored = nf
859 862 ignoredata = repo.dirstate._ignorefileandline(nf)
860 863 else:
861 864 for p in util.finddirs(nf):
862 865 if ignore(p):
863 866 ignored = p
864 867 ignoredata = repo.dirstate._ignorefileandline(p)
865 868 break
866 869 if ignored:
867 870 if ignored == nf:
868 871 ui.write(_("%s is ignored\n") % f)
869 872 else:
870 873 ui.write(_("%s is ignored because of "
871 874 "containing folder %s\n")
872 875 % (f, ignored))
873 876 ignorefile, lineno, line = ignoredata
874 877 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
875 878 % (ignorefile, lineno, line))
876 879 else:
877 880 ui.write(_("%s is not ignored\n") % f)
878 881
879 882 @command('debugindex', commands.debugrevlogopts +
880 883 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
881 884 _('[-f FORMAT] -c|-m|FILE'),
882 885 optionalrepo=True)
883 886 def debugindex(ui, repo, file_=None, **opts):
884 887 """dump the contents of an index file"""
885 888 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
886 889 format = opts.get('format', 0)
887 890 if format not in (0, 1):
888 891 raise error.Abort(_("unknown format %d") % format)
889 892
890 893 generaldelta = r.version & revlog.REVLOGGENERALDELTA
891 894 if generaldelta:
892 895 basehdr = ' delta'
893 896 else:
894 897 basehdr = ' base'
895 898
896 899 if ui.debugflag:
897 900 shortfn = hex
898 901 else:
899 902 shortfn = short
900 903
901 904 # There might not be anything in r, so have a sane default
902 905 idlen = 12
903 906 for i in r:
904 907 idlen = len(shortfn(r.node(i)))
905 908 break
906 909
907 910 if format == 0:
908 911 ui.write((" rev offset length " + basehdr + " linkrev"
909 912 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
910 913 elif format == 1:
911 914 ui.write((" rev flag offset length"
912 915 " size " + basehdr + " link p1 p2"
913 916 " %s\n") % "nodeid".rjust(idlen))
914 917
915 918 for i in r:
916 919 node = r.node(i)
917 920 if generaldelta:
918 921 base = r.deltaparent(i)
919 922 else:
920 923 base = r.chainbase(i)
921 924 if format == 0:
922 925 try:
923 926 pp = r.parents(node)
924 927 except Exception:
925 928 pp = [nullid, nullid]
926 929 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
927 930 i, r.start(i), r.length(i), base, r.linkrev(i),
928 931 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
929 932 elif format == 1:
930 933 pr = r.parentrevs(i)
931 934 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
932 935 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
933 936 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
934 937
935 938 @command('debugindexdot', commands.debugrevlogopts,
936 939 _('-c|-m|FILE'), optionalrepo=True)
937 940 def debugindexdot(ui, repo, file_=None, **opts):
938 941 """dump an index DAG as a graphviz dot file"""
939 942 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
940 943 ui.write(("digraph G {\n"))
941 944 for i in r:
942 945 node = r.node(i)
943 946 pp = r.parents(node)
944 947 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
945 948 if pp[1] != nullid:
946 949 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
947 950 ui.write("}\n")
948 951
949 952 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
950 953 def debuginstall(ui, **opts):
951 954 '''test Mercurial installation
952 955
953 956 Returns 0 on success.
954 957 '''
955 958
956 959 def writetemp(contents):
957 960 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
958 961 f = os.fdopen(fd, pycompat.sysstr("wb"))
959 962 f.write(contents)
960 963 f.close()
961 964 return name
962 965
963 966 problems = 0
964 967
965 968 fm = ui.formatter('debuginstall', opts)
966 969 fm.startitem()
967 970
968 971 # encoding
969 972 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
970 973 err = None
971 974 try:
972 975 encoding.fromlocal("test")
973 976 except error.Abort as inst:
974 977 err = inst
975 978 problems += 1
976 979 fm.condwrite(err, 'encodingerror', _(" %s\n"
977 980 " (check that your locale is properly set)\n"), err)
978 981
979 982 # Python
980 983 fm.write('pythonexe', _("checking Python executable (%s)\n"),
981 984 pycompat.sysexecutable)
982 985 fm.write('pythonver', _("checking Python version (%s)\n"),
983 986 ("%d.%d.%d" % sys.version_info[:3]))
984 987 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
985 988 os.path.dirname(pycompat.fsencode(os.__file__)))
986 989
987 990 security = set(sslutil.supportedprotocols)
988 991 if sslutil.hassni:
989 992 security.add('sni')
990 993
991 994 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
992 995 fm.formatlist(sorted(security), name='protocol',
993 996 fmt='%s', sep=','))
994 997
995 998 # These are warnings, not errors. So don't increment problem count. This
996 999 # may change in the future.
997 1000 if 'tls1.2' not in security:
998 1001 fm.plain(_(' TLS 1.2 not supported by Python install; '
999 1002 'network connections lack modern security\n'))
1000 1003 if 'sni' not in security:
1001 1004 fm.plain(_(' SNI not supported by Python install; may have '
1002 1005 'connectivity issues with some servers\n'))
1003 1006
1004 1007 # TODO print CA cert info
1005 1008
1006 1009 # hg version
1007 1010 hgver = util.version()
1008 1011 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1009 1012 hgver.split('+')[0])
1010 1013 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1011 1014 '+'.join(hgver.split('+')[1:]))
1012 1015
1013 1016 # compiled modules
1014 1017 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1015 1018 policy.policy)
1016 1019 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1017 1020 os.path.dirname(pycompat.fsencode(__file__)))
1018 1021
1019 1022 err = None
1020 1023 try:
1021 1024 from . import (
1022 1025 base85,
1023 1026 bdiff,
1024 1027 mpatch,
1025 1028 osutil,
1026 1029 )
1027 1030 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1028 1031 except Exception as inst:
1029 1032 err = inst
1030 1033 problems += 1
1031 1034 fm.condwrite(err, 'extensionserror', " %s\n", err)
1032 1035
1033 1036 compengines = util.compengines._engines.values()
1034 1037 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1035 1038 fm.formatlist(sorted(e.name() for e in compengines),
1036 1039 name='compengine', fmt='%s', sep=', '))
1037 1040 fm.write('compenginesavail', _('checking available compression engines '
1038 1041 '(%s)\n'),
1039 1042 fm.formatlist(sorted(e.name() for e in compengines
1040 1043 if e.available()),
1041 1044 name='compengine', fmt='%s', sep=', '))
1042 1045 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1043 1046 fm.write('compenginesserver', _('checking available compression engines '
1044 1047 'for wire protocol (%s)\n'),
1045 1048 fm.formatlist([e.name() for e in wirecompengines
1046 1049 if e.wireprotosupport()],
1047 1050 name='compengine', fmt='%s', sep=', '))
1048 1051
1049 1052 # templates
1050 1053 p = templater.templatepaths()
1051 1054 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1052 1055 fm.condwrite(not p, '', _(" no template directories found\n"))
1053 1056 if p:
1054 1057 m = templater.templatepath("map-cmdline.default")
1055 1058 if m:
1056 1059 # template found, check if it is working
1057 1060 err = None
1058 1061 try:
1059 1062 templater.templater.frommapfile(m)
1060 1063 except Exception as inst:
1061 1064 err = inst
1062 1065 p = None
1063 1066 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1064 1067 else:
1065 1068 p = None
1066 1069 fm.condwrite(p, 'defaulttemplate',
1067 1070 _("checking default template (%s)\n"), m)
1068 1071 fm.condwrite(not m, 'defaulttemplatenotfound',
1069 1072 _(" template '%s' not found\n"), "default")
1070 1073 if not p:
1071 1074 problems += 1
1072 1075 fm.condwrite(not p, '',
1073 1076 _(" (templates seem to have been installed incorrectly)\n"))
1074 1077
1075 1078 # editor
1076 1079 editor = ui.geteditor()
1077 1080 editor = util.expandpath(editor)
1078 1081 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1079 1082 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1080 1083 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1081 1084 _(" No commit editor set and can't find %s in PATH\n"
1082 1085 " (specify a commit editor in your configuration"
1083 1086 " file)\n"), not cmdpath and editor == 'vi' and editor)
1084 1087 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1085 1088 _(" Can't find editor '%s' in PATH\n"
1086 1089 " (specify a commit editor in your configuration"
1087 1090 " file)\n"), not cmdpath and editor)
1088 1091 if not cmdpath and editor != 'vi':
1089 1092 problems += 1
1090 1093
1091 1094 # check username
1092 1095 username = None
1093 1096 err = None
1094 1097 try:
1095 1098 username = ui.username()
1096 1099 except error.Abort as e:
1097 1100 err = e
1098 1101 problems += 1
1099 1102
1100 1103 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1101 1104 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1102 1105 " (specify a username in your configuration file)\n"), err)
1103 1106
1104 1107 fm.condwrite(not problems, '',
1105 1108 _("no problems detected\n"))
1106 1109 if not problems:
1107 1110 fm.data(problems=problems)
1108 1111 fm.condwrite(problems, 'problems',
1109 1112 _("%d problems detected,"
1110 1113 " please check your install!\n"), problems)
1111 1114 fm.end()
1112 1115
1113 1116 return problems
1114 1117
1115 1118 @command('debugknown', [], _('REPO ID...'), norepo=True)
1116 1119 def debugknown(ui, repopath, *ids, **opts):
1117 1120 """test whether node ids are known to a repo
1118 1121
1119 1122 Every ID must be a full-length hex node id string. Returns a list of 0s
1120 1123 and 1s indicating unknown/known.
1121 1124 """
1122 1125 repo = hg.peer(ui, opts, repopath)
1123 1126 if not repo.capable('known'):
1124 1127 raise error.Abort("known() not supported by target repository")
1125 1128 flags = repo.known([bin(s) for s in ids])
1126 1129 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1127 1130
1128 1131 @command('debuglabelcomplete', [], _('LABEL...'))
1129 1132 def debuglabelcomplete(ui, repo, *args):
1130 1133 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1131 1134 debugnamecomplete(ui, repo, *args)
1132 1135
1133 1136 @command('debuglocks',
1134 1137 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1135 1138 ('W', 'force-wlock', None,
1136 1139 _('free the working state lock (DANGEROUS)'))],
1137 1140 _('[OPTION]...'))
1138 1141 def debuglocks(ui, repo, **opts):
1139 1142 """show or modify state of locks
1140 1143
1141 1144 By default, this command will show which locks are held. This
1142 1145 includes the user and process holding the lock, the amount of time
1143 1146 the lock has been held, and the machine name where the process is
1144 1147 running if it's not local.
1145 1148
1146 1149 Locks protect the integrity of Mercurial's data, so should be
1147 1150 treated with care. System crashes or other interruptions may cause
1148 1151 locks to not be properly released, though Mercurial will usually
1149 1152 detect and remove such stale locks automatically.
1150 1153
1151 1154 However, detecting stale locks may not always be possible (for
1152 1155 instance, on a shared filesystem). Removing locks may also be
1153 1156 blocked by filesystem permissions.
1154 1157
1155 1158 Returns 0 if no locks are held.
1156 1159
1157 1160 """
1158 1161
1159 1162 if opts.get('force_lock'):
1160 1163 repo.svfs.unlink('lock')
1161 1164 if opts.get('force_wlock'):
1162 1165 repo.vfs.unlink('wlock')
1163 1166 if opts.get('force_lock') or opts.get('force_lock'):
1164 1167 return 0
1165 1168
1166 1169 now = time.time()
1167 1170 held = 0
1168 1171
1169 1172 def report(vfs, name, method):
1170 1173 # this causes stale locks to get reaped for more accurate reporting
1171 1174 try:
1172 1175 l = method(False)
1173 1176 except error.LockHeld:
1174 1177 l = None
1175 1178
1176 1179 if l:
1177 1180 l.release()
1178 1181 else:
1179 1182 try:
1180 1183 stat = vfs.lstat(name)
1181 1184 age = now - stat.st_mtime
1182 1185 user = util.username(stat.st_uid)
1183 1186 locker = vfs.readlock(name)
1184 1187 if ":" in locker:
1185 1188 host, pid = locker.split(':')
1186 1189 if host == socket.gethostname():
1187 1190 locker = 'user %s, process %s' % (user, pid)
1188 1191 else:
1189 1192 locker = 'user %s, process %s, host %s' \
1190 1193 % (user, pid, host)
1191 1194 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1192 1195 return 1
1193 1196 except OSError as e:
1194 1197 if e.errno != errno.ENOENT:
1195 1198 raise
1196 1199
1197 1200 ui.write(("%-6s free\n") % (name + ":"))
1198 1201 return 0
1199 1202
1200 1203 held += report(repo.svfs, "lock", repo.lock)
1201 1204 held += report(repo.vfs, "wlock", repo.wlock)
1202 1205
1203 1206 return held
1204 1207
1205 1208 @command('debugmergestate', [], '')
1206 1209 def debugmergestate(ui, repo, *args):
1207 1210 """print merge state
1208 1211
1209 1212 Use --verbose to print out information about whether v1 or v2 merge state
1210 1213 was chosen."""
1211 1214 def _hashornull(h):
1212 1215 if h == nullhex:
1213 1216 return 'null'
1214 1217 else:
1215 1218 return h
1216 1219
1217 1220 def printrecords(version):
1218 1221 ui.write(('* version %s records\n') % version)
1219 1222 if version == 1:
1220 1223 records = v1records
1221 1224 else:
1222 1225 records = v2records
1223 1226
1224 1227 for rtype, record in records:
1225 1228 # pretty print some record types
1226 1229 if rtype == 'L':
1227 1230 ui.write(('local: %s\n') % record)
1228 1231 elif rtype == 'O':
1229 1232 ui.write(('other: %s\n') % record)
1230 1233 elif rtype == 'm':
1231 1234 driver, mdstate = record.split('\0', 1)
1232 1235 ui.write(('merge driver: %s (state "%s")\n')
1233 1236 % (driver, mdstate))
1234 1237 elif rtype in 'FDC':
1235 1238 r = record.split('\0')
1236 1239 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1237 1240 if version == 1:
1238 1241 onode = 'not stored in v1 format'
1239 1242 flags = r[7]
1240 1243 else:
1241 1244 onode, flags = r[7:9]
1242 1245 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1243 1246 % (f, rtype, state, _hashornull(hash)))
1244 1247 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1245 1248 ui.write((' ancestor path: %s (node %s)\n')
1246 1249 % (afile, _hashornull(anode)))
1247 1250 ui.write((' other path: %s (node %s)\n')
1248 1251 % (ofile, _hashornull(onode)))
1249 1252 elif rtype == 'f':
1250 1253 filename, rawextras = record.split('\0', 1)
1251 1254 extras = rawextras.split('\0')
1252 1255 i = 0
1253 1256 extrastrings = []
1254 1257 while i < len(extras):
1255 1258 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1256 1259 i += 2
1257 1260
1258 1261 ui.write(('file extras: %s (%s)\n')
1259 1262 % (filename, ', '.join(extrastrings)))
1260 1263 elif rtype == 'l':
1261 1264 labels = record.split('\0', 2)
1262 1265 labels = [l for l in labels if len(l) > 0]
1263 1266 ui.write(('labels:\n'))
1264 1267 ui.write((' local: %s\n' % labels[0]))
1265 1268 ui.write((' other: %s\n' % labels[1]))
1266 1269 if len(labels) > 2:
1267 1270 ui.write((' base: %s\n' % labels[2]))
1268 1271 else:
1269 1272 ui.write(('unrecognized entry: %s\t%s\n')
1270 1273 % (rtype, record.replace('\0', '\t')))
1271 1274
1272 1275 # Avoid mergestate.read() since it may raise an exception for unsupported
1273 1276 # merge state records. We shouldn't be doing this, but this is OK since this
1274 1277 # command is pretty low-level.
1275 1278 ms = mergemod.mergestate(repo)
1276 1279
1277 1280 # sort so that reasonable information is on top
1278 1281 v1records = ms._readrecordsv1()
1279 1282 v2records = ms._readrecordsv2()
1280 1283 order = 'LOml'
1281 1284 def key(r):
1282 1285 idx = order.find(r[0])
1283 1286 if idx == -1:
1284 1287 return (1, r[1])
1285 1288 else:
1286 1289 return (0, idx)
1287 1290 v1records.sort(key=key)
1288 1291 v2records.sort(key=key)
1289 1292
1290 1293 if not v1records and not v2records:
1291 1294 ui.write(('no merge state found\n'))
1292 1295 elif not v2records:
1293 1296 ui.note(('no version 2 merge state\n'))
1294 1297 printrecords(1)
1295 1298 elif ms._v1v2match(v1records, v2records):
1296 1299 ui.note(('v1 and v2 states match: using v2\n'))
1297 1300 printrecords(2)
1298 1301 else:
1299 1302 ui.note(('v1 and v2 states mismatch: using v1\n'))
1300 1303 printrecords(1)
1301 1304 if ui.verbose:
1302 1305 printrecords(2)
1303 1306
1304 1307 @command('debugnamecomplete', [], _('NAME...'))
1305 1308 def debugnamecomplete(ui, repo, *args):
1306 1309 '''complete "names" - tags, open branch names, bookmark names'''
1307 1310
1308 1311 names = set()
1309 1312 # since we previously only listed open branches, we will handle that
1310 1313 # specially (after this for loop)
1311 1314 for name, ns in repo.names.iteritems():
1312 1315 if name != 'branches':
1313 1316 names.update(ns.listnames(repo))
1314 1317 names.update(tag for (tag, heads, tip, closed)
1315 1318 in repo.branchmap().iterbranches() if not closed)
1316 1319 completions = set()
1317 1320 if not args:
1318 1321 args = ['']
1319 1322 for a in args:
1320 1323 completions.update(n for n in names if n.startswith(a))
1321 1324 ui.write('\n'.join(sorted(completions)))
1322 1325 ui.write('\n')
1323 1326
1324 1327 @command('debugobsolete',
1325 1328 [('', 'flags', 0, _('markers flag')),
1326 1329 ('', 'record-parents', False,
1327 1330 _('record parent information for the precursor')),
1328 1331 ('r', 'rev', [], _('display markers relevant to REV')),
1329 1332 ('', 'index', False, _('display index of the marker')),
1330 1333 ('', 'delete', [], _('delete markers specified by indices')),
1331 1334 ] + commands.commitopts2 + commands.formatteropts,
1332 1335 _('[OBSOLETED [REPLACEMENT ...]]'))
1333 1336 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1334 1337 """create arbitrary obsolete marker
1335 1338
1336 1339 With no arguments, displays the list of obsolescence markers."""
1337 1340
1338 1341 def parsenodeid(s):
1339 1342 try:
1340 1343 # We do not use revsingle/revrange functions here to accept
1341 1344 # arbitrary node identifiers, possibly not present in the
1342 1345 # local repository.
1343 1346 n = bin(s)
1344 1347 if len(n) != len(nullid):
1345 1348 raise TypeError()
1346 1349 return n
1347 1350 except TypeError:
1348 1351 raise error.Abort('changeset references must be full hexadecimal '
1349 1352 'node identifiers')
1350 1353
1351 1354 if opts.get('delete'):
1352 1355 indices = []
1353 1356 for v in opts.get('delete'):
1354 1357 try:
1355 1358 indices.append(int(v))
1356 1359 except ValueError:
1357 1360 raise error.Abort(_('invalid index value: %r') % v,
1358 1361 hint=_('use integers for indices'))
1359 1362
1360 1363 if repo.currenttransaction():
1361 1364 raise error.Abort(_('cannot delete obsmarkers in the middle '
1362 1365 'of transaction.'))
1363 1366
1364 1367 with repo.lock():
1365 1368 n = repair.deleteobsmarkers(repo.obsstore, indices)
1366 1369 ui.write(_('deleted %i obsolescence markers\n') % n)
1367 1370
1368 1371 return
1369 1372
1370 1373 if precursor is not None:
1371 1374 if opts['rev']:
1372 1375 raise error.Abort('cannot select revision when creating marker')
1373 1376 metadata = {}
1374 1377 metadata['user'] = opts['user'] or ui.username()
1375 1378 succs = tuple(parsenodeid(succ) for succ in successors)
1376 1379 l = repo.lock()
1377 1380 try:
1378 1381 tr = repo.transaction('debugobsolete')
1379 1382 try:
1380 1383 date = opts.get('date')
1381 1384 if date:
1382 1385 date = util.parsedate(date)
1383 1386 else:
1384 1387 date = None
1385 1388 prec = parsenodeid(precursor)
1386 1389 parents = None
1387 1390 if opts['record_parents']:
1388 1391 if prec not in repo.unfiltered():
1389 1392 raise error.Abort('cannot used --record-parents on '
1390 1393 'unknown changesets')
1391 1394 parents = repo.unfiltered()[prec].parents()
1392 1395 parents = tuple(p.node() for p in parents)
1393 1396 repo.obsstore.create(tr, prec, succs, opts['flags'],
1394 1397 parents=parents, date=date,
1395 1398 metadata=metadata)
1396 1399 tr.close()
1397 1400 except ValueError as exc:
1398 1401 raise error.Abort(_('bad obsmarker input: %s') % exc)
1399 1402 finally:
1400 1403 tr.release()
1401 1404 finally:
1402 1405 l.release()
1403 1406 else:
1404 1407 if opts['rev']:
1405 1408 revs = scmutil.revrange(repo, opts['rev'])
1406 1409 nodes = [repo[r].node() for r in revs]
1407 1410 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1408 1411 markers.sort(key=lambda x: x._data)
1409 1412 else:
1410 1413 markers = obsolete.getmarkers(repo)
1411 1414
1412 1415 markerstoiter = markers
1413 1416 isrelevant = lambda m: True
1414 1417 if opts.get('rev') and opts.get('index'):
1415 1418 markerstoiter = obsolete.getmarkers(repo)
1416 1419 markerset = set(markers)
1417 1420 isrelevant = lambda m: m in markerset
1418 1421
1419 1422 fm = ui.formatter('debugobsolete', opts)
1420 1423 for i, m in enumerate(markerstoiter):
1421 1424 if not isrelevant(m):
1422 1425 # marker can be irrelevant when we're iterating over a set
1423 1426 # of markers (markerstoiter) which is bigger than the set
1424 1427 # of markers we want to display (markers)
1425 1428 # this can happen if both --index and --rev options are
1426 1429 # provided and thus we need to iterate over all of the markers
1427 1430 # to get the correct indices, but only display the ones that
1428 1431 # are relevant to --rev value
1429 1432 continue
1430 1433 fm.startitem()
1431 1434 ind = i if opts.get('index') else None
1432 1435 cmdutil.showmarker(fm, m, index=ind)
1433 1436 fm.end()
1434 1437
1435 1438 @command('debugpathcomplete',
1436 1439 [('f', 'full', None, _('complete an entire path')),
1437 1440 ('n', 'normal', None, _('show only normal files')),
1438 1441 ('a', 'added', None, _('show only added files')),
1439 1442 ('r', 'removed', None, _('show only removed files'))],
1440 1443 _('FILESPEC...'))
1441 1444 def debugpathcomplete(ui, repo, *specs, **opts):
1442 1445 '''complete part or all of a tracked path
1443 1446
1444 1447 This command supports shells that offer path name completion. It
1445 1448 currently completes only files already known to the dirstate.
1446 1449
1447 1450 Completion extends only to the next path segment unless
1448 1451 --full is specified, in which case entire paths are used.'''
1449 1452
1450 1453 def complete(path, acceptable):
1451 1454 dirstate = repo.dirstate
1452 1455 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1453 1456 rootdir = repo.root + pycompat.ossep
1454 1457 if spec != repo.root and not spec.startswith(rootdir):
1455 1458 return [], []
1456 1459 if os.path.isdir(spec):
1457 1460 spec += '/'
1458 1461 spec = spec[len(rootdir):]
1459 1462 fixpaths = pycompat.ossep != '/'
1460 1463 if fixpaths:
1461 1464 spec = spec.replace(pycompat.ossep, '/')
1462 1465 speclen = len(spec)
1463 1466 fullpaths = opts['full']
1464 1467 files, dirs = set(), set()
1465 1468 adddir, addfile = dirs.add, files.add
1466 1469 for f, st in dirstate.iteritems():
1467 1470 if f.startswith(spec) and st[0] in acceptable:
1468 1471 if fixpaths:
1469 1472 f = f.replace('/', pycompat.ossep)
1470 1473 if fullpaths:
1471 1474 addfile(f)
1472 1475 continue
1473 1476 s = f.find(pycompat.ossep, speclen)
1474 1477 if s >= 0:
1475 1478 adddir(f[:s])
1476 1479 else:
1477 1480 addfile(f)
1478 1481 return files, dirs
1479 1482
1480 1483 acceptable = ''
1481 1484 if opts['normal']:
1482 1485 acceptable += 'nm'
1483 1486 if opts['added']:
1484 1487 acceptable += 'a'
1485 1488 if opts['removed']:
1486 1489 acceptable += 'r'
1487 1490 cwd = repo.getcwd()
1488 1491 if not specs:
1489 1492 specs = ['.']
1490 1493
1491 1494 files, dirs = set(), set()
1492 1495 for spec in specs:
1493 1496 f, d = complete(spec, acceptable or 'nmar')
1494 1497 files.update(f)
1495 1498 dirs.update(d)
1496 1499 files.update(dirs)
1497 1500 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1498 1501 ui.write('\n')
1499 1502
1500 1503 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1501 1504 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1502 1505 '''access the pushkey key/value protocol
1503 1506
1504 1507 With two args, list the keys in the given namespace.
1505 1508
1506 1509 With five args, set a key to new if it currently is set to old.
1507 1510 Reports success or failure.
1508 1511 '''
1509 1512
1510 1513 target = hg.peer(ui, {}, repopath)
1511 1514 if keyinfo:
1512 1515 key, old, new = keyinfo
1513 1516 r = target.pushkey(namespace, key, old, new)
1514 1517 ui.status(str(r) + '\n')
1515 1518 return not r
1516 1519 else:
1517 1520 for k, v in sorted(target.listkeys(namespace).iteritems()):
1518 1521 ui.write("%s\t%s\n" % (util.escapestr(k),
1519 1522 util.escapestr(v)))
1520 1523
1521 1524 @command('debugpvec', [], _('A B'))
1522 1525 def debugpvec(ui, repo, a, b=None):
1523 1526 ca = scmutil.revsingle(repo, a)
1524 1527 cb = scmutil.revsingle(repo, b)
1525 1528 pa = pvec.ctxpvec(ca)
1526 1529 pb = pvec.ctxpvec(cb)
1527 1530 if pa == pb:
1528 1531 rel = "="
1529 1532 elif pa > pb:
1530 1533 rel = ">"
1531 1534 elif pa < pb:
1532 1535 rel = "<"
1533 1536 elif pa | pb:
1534 1537 rel = "|"
1535 1538 ui.write(_("a: %s\n") % pa)
1536 1539 ui.write(_("b: %s\n") % pb)
1537 1540 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1538 1541 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1539 1542 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1540 1543 pa.distance(pb), rel))
1541 1544
1542 1545 @command('debugrebuilddirstate|debugrebuildstate',
1543 1546 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1544 1547 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1545 1548 'the working copy parent')),
1546 1549 ],
1547 1550 _('[-r REV]'))
1548 1551 def debugrebuilddirstate(ui, repo, rev, **opts):
1549 1552 """rebuild the dirstate as it would look like for the given revision
1550 1553
1551 1554 If no revision is specified, the first parent of the working directory is used.
1552 1555
1553 1556 The dirstate will be set to the files of the given revision.
1554 1557 The actual working directory content or existing dirstate
1555 1558 information such as adds or removes is not considered.
1556 1559
1557 1560 ``minimal`` will only rebuild the dirstate status for files that claim to be
1558 1561 tracked but are not in the parent manifest, or that exist in the parent
1559 1562 manifest but are not in the dirstate. It will not change adds, removes, or
1560 1563 modified files that are in the working copy parent.
1561 1564
1562 1565 One use of this command is to make the next :hg:`status` invocation
1563 1566 check the actual file content.
1564 1567 """
1565 1568 ctx = scmutil.revsingle(repo, rev)
1566 1569 with repo.wlock():
1567 1570 dirstate = repo.dirstate
1568 1571 changedfiles = None
1569 1572 # See command doc for what minimal does.
1570 1573 if opts.get('minimal'):
1571 1574 manifestfiles = set(ctx.manifest().keys())
1572 1575 dirstatefiles = set(dirstate)
1573 1576 manifestonly = manifestfiles - dirstatefiles
1574 1577 dsonly = dirstatefiles - manifestfiles
1575 1578 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1576 1579 changedfiles = manifestonly | dsnotadded
1577 1580
1578 1581 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1579 1582
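# A minimal sketch (names hypothetical, not part of the module) of the
# --minimal selection performed above: rebuild only files that appear on
# exactly one side, except dirstate-only files that are already marked 'a'
# (added).
def _minimalchangedfiles(manifestfiles, dirstatefiles, state):
    manifestonly = manifestfiles - dirstatefiles
    dsnotadded = set(f for f in dirstatefiles - manifestfiles
                     if state(f) != 'a')
    return manifestonly | dsnotadded

# _minimalchangedfiles({'a', 'b'}, {'b', 'c', 'd'},
#                      {'b': 'n', 'c': 'n', 'd': 'a'}.get)
# -> {'a', 'c'}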
1580 1583 @command('debugrebuildfncache', [], '')
1581 1584 def debugrebuildfncache(ui, repo):
1582 1585 """rebuild the fncache file"""
1583 1586 repair.rebuildfncache(ui, repo)
1584 1587
1585 1588 @command('debugrename',
1586 1589 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1587 1590 _('[-r REV] FILE'))
1588 1591 def debugrename(ui, repo, file1, *pats, **opts):
1589 1592 """dump rename information"""
1590 1593
1591 1594 ctx = scmutil.revsingle(repo, opts.get('rev'))
1592 1595 m = scmutil.match(ctx, (file1,) + pats, opts)
1593 1596 for abs in ctx.walk(m):
1594 1597 fctx = ctx[abs]
1595 1598 o = fctx.filelog().renamed(fctx.filenode())
1596 1599 rel = m.rel(abs)
1597 1600 if o:
1598 1601 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1599 1602 else:
1600 1603 ui.write(_("%s not renamed\n") % rel)
1601 1604
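# Hedged usage sketch (file names and hash are illustrative); the output
# format follows the two ui.write() calls above:
#
#   $ hg debugrename -r . copied.txt
#   copied.txt renamed from original.txt:9a8b7c6d...
#   $ hg debugrename plain.txt
#   plain.txt not renamed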
1602 1605 @command('debugrevlog', commands.debugrevlogopts +
1603 1606 [('d', 'dump', False, _('dump index data'))],
1604 1607 _('-c|-m|FILE'),
1605 1608 optionalrepo=True)
1606 1609 def debugrevlog(ui, repo, file_=None, **opts):
1607 1610 """show data and statistics about a revlog"""
1608 1611 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1609 1612
1610 1613 if opts.get("dump"):
1611 1614 numrevs = len(r)
1612 1615 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1613 1616 " rawsize totalsize compression heads chainlen\n"))
1614 1617 ts = 0
1615 1618 heads = set()
1616 1619
1617 1620 for rev in xrange(numrevs):
1618 1621 dbase = r.deltaparent(rev)
1619 1622 if dbase == -1:
1620 1623 dbase = rev
1621 1624 cbase = r.chainbase(rev)
1622 1625 clen = r.chainlen(rev)
1623 1626 p1, p2 = r.parentrevs(rev)
1624 1627 rs = r.rawsize(rev)
1625 1628 ts = ts + rs
1626 1629 heads -= set(r.parentrevs(rev))
1627 1630 heads.add(rev)
1628 1631 try:
1629 1632 compression = ts / r.end(rev)
1630 1633 except ZeroDivisionError:
1631 1634 compression = 0
1632 1635 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1633 1636 "%11d %5d %8d\n" %
1634 1637 (rev, p1, p2, r.start(rev), r.end(rev),
1635 1638 r.start(dbase), r.start(cbase),
1636 1639 r.start(p1), r.start(p2),
1637 1640 rs, ts, compression, len(heads), clen))
1638 1641 return 0
1639 1642
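# Note on the dump above: the ``compression`` column is cumulative, the sum of
# raw sizes of revisions 0..rev divided by r.end(rev), the amount of revlog
# data stored up to and including rev, rather than a per-revision ratio.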
1640 1643 v = r.version
1641 1644 format = v & 0xFFFF
1642 1645 flags = []
1643 1646 gdelta = False
1644 1647 if v & revlog.REVLOGNGINLINEDATA:
1645 1648 flags.append('inline')
1646 1649 if v & revlog.REVLOGGENERALDELTA:
1647 1650 gdelta = True
1648 1651 flags.append('generaldelta')
1649 1652 if not flags:
1650 1653 flags = ['(none)']
1651 1654
1652 1655 nummerges = 0
1653 1656 numfull = 0
1654 1657 numprev = 0
1655 1658 nump1 = 0
1656 1659 nump2 = 0
1657 1660 numother = 0
1658 1661 nump1prev = 0
1659 1662 nump2prev = 0
1660 1663 chainlengths = []
1661 1664
1662 1665 datasize = [None, 0, 0]
1663 1666 fullsize = [None, 0, 0]
1664 1667 deltasize = [None, 0, 0]
1665 1668 chunktypecounts = {}
1666 1669 chunktypesizes = {}
1667 1670
1668 1671 def addsize(size, l):
1669 1672 if l[0] is None or size < l[0]:
1670 1673 l[0] = size
1671 1674 if size > l[1]:
1672 1675 l[1] = size
1673 1676 l[2] += size
1674 1677
1675 1678 numrevs = len(r)
1676 1679 for rev in xrange(numrevs):
1677 1680 p1, p2 = r.parentrevs(rev)
1678 1681 delta = r.deltaparent(rev)
1679 1682 if format > 0:
1680 1683 addsize(r.rawsize(rev), datasize)
1681 1684 if p2 != nullrev:
1682 1685 nummerges += 1
1683 1686 size = r.length(rev)
1684 1687 if delta == nullrev:
1685 1688 chainlengths.append(0)
1686 1689 numfull += 1
1687 1690 addsize(size, fullsize)
1688 1691 else:
1689 1692 chainlengths.append(chainlengths[delta] + 1)
1690 1693 addsize(size, deltasize)
1691 1694 if delta == rev - 1:
1692 1695 numprev += 1
1693 1696 if delta == p1:
1694 1697 nump1prev += 1
1695 1698 elif delta == p2:
1696 1699 nump2prev += 1
1697 1700 elif delta == p1:
1698 1701 nump1 += 1
1699 1702 elif delta == p2:
1700 1703 nump2 += 1
1701 1704 elif delta != nullrev:
1702 1705 numother += 1
1703 1706
1704 1707 # Obtain data on the raw chunks in the revlog.
1705 1708 chunk = r._chunkraw(rev, rev)[1]
1706 1709 if chunk:
1707 1710 chunktype = chunk[0]
1708 1711 else:
1709 1712 chunktype = 'empty'
1710 1713
1711 1714 if chunktype not in chunktypecounts:
1712 1715 chunktypecounts[chunktype] = 0
1713 1716 chunktypesizes[chunktype] = 0
1714 1717
1715 1718 chunktypecounts[chunktype] += 1
1716 1719 chunktypesizes[chunktype] += size
1717 1720
1718 1721 # Adjust size min value for empty cases
1719 1722 for size in (datasize, fullsize, deltasize):
1720 1723 if size[0] is None:
1721 1724 size[0] = 0
1722 1725
1723 1726 numdeltas = numrevs - numfull
1724 1727 numoprev = numprev - nump1prev - nump2prev
1725 1728 totalrawsize = datasize[2]
1726 1729 datasize[2] /= numrevs
1727 1730 fulltotal = fullsize[2]
1728 1731 fullsize[2] /= numfull
1729 1732 deltatotal = deltasize[2]
1730 1733 if numrevs - numfull > 0:
1731 1734 deltasize[2] /= numrevs - numfull
1732 1735 totalsize = fulltotal + deltatotal
1733 1736 avgchainlen = sum(chainlengths) / numrevs
1734 1737 maxchainlen = max(chainlengths)
1735 1738 compratio = 1
1736 1739 if totalsize:
1737 1740 compratio = totalrawsize / totalsize
1738 1741
1739 1742 basedfmtstr = '%%%dd\n'
1740 1743 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1741 1744
1742 1745 def dfmtstr(max):
1743 1746 return basedfmtstr % len(str(max))
1744 1747 def pcfmtstr(max, padding=0):
1745 1748 return basepcfmtstr % (len(str(max)), ' ' * padding)
1746 1749
1747 1750 def pcfmt(value, total):
1748 1751 if total:
1749 1752 return (value, 100 * float(value) / total)
1750 1753 else:
1751 1754 return value, 100.0
1752 1755
1753 1756 ui.write(('format : %d\n') % format)
1754 1757 ui.write(('flags : %s\n') % ', '.join(flags))
1755 1758
1756 1759 ui.write('\n')
1757 1760 fmt = pcfmtstr(totalsize)
1758 1761 fmt2 = dfmtstr(totalsize)
1759 1762 ui.write(('revisions : ') + fmt2 % numrevs)
1760 1763 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1761 1764 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1762 1765 ui.write(('revisions : ') + fmt2 % numrevs)
1763 1766 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1764 1767 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1765 1768 ui.write(('revision size : ') + fmt2 % totalsize)
1766 1769 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1767 1770 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1768 1771
1769 1772 def fmtchunktype(chunktype):
1770 1773 if chunktype == 'empty':
1771 1774 return ' %s : ' % chunktype
1772 1775 elif chunktype in string.ascii_letters:
1773 1776 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1774 1777 else:
1775 1778 return ' 0x%s : ' % hex(chunktype)
1776 1779
1777 1780 ui.write('\n')
1778 1781 ui.write(('chunks : ') + fmt2 % numrevs)
1779 1782 for chunktype in sorted(chunktypecounts):
1780 1783 ui.write(fmtchunktype(chunktype))
1781 1784 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1782 1785 ui.write(('chunks size : ') + fmt2 % totalsize)
1783 1786 for chunktype in sorted(chunktypecounts):
1784 1787 ui.write(fmtchunktype(chunktype))
1785 1788 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1786 1789
1787 1790 ui.write('\n')
1788 1791 fmt = dfmtstr(max(avgchainlen, compratio))
1789 1792 ui.write(('avg chain length : ') + fmt % avgchainlen)
1790 1793 ui.write(('max chain length : ') + fmt % maxchainlen)
1791 1794 ui.write(('compression ratio : ') + fmt % compratio)
1792 1795
1793 1796 if format > 0:
1794 1797 ui.write('\n')
1795 1798 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1796 1799 % tuple(datasize))
1797 1800 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1798 1801 % tuple(fullsize))
1799 1802 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1800 1803 % tuple(deltasize))
1801 1804
1802 1805 if numdeltas > 0:
1803 1806 ui.write('\n')
1804 1807 fmt = pcfmtstr(numdeltas)
1805 1808 fmt2 = pcfmtstr(numdeltas, 4)
1806 1809 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1807 1810 if numprev > 0:
1808 1811 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1809 1812 numprev))
1810 1813 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1811 1814 numprev))
1812 1815 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1813 1816 numprev))
1814 1817 if gdelta:
1815 1818 ui.write(('deltas against p1 : ')
1816 1819 + fmt % pcfmt(nump1, numdeltas))
1817 1820 ui.write(('deltas against p2 : ')
1818 1821 + fmt % pcfmt(nump2, numdeltas))
1819 1822 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1820 1823 numdeltas))
1821 1824
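# Hedged usage sketch (flags follow the synopsis '-c|-m|FILE' above):
#
#   hg debugrevlog -c            # statistics for the changelog
#   hg debugrevlog -m            # statistics for the manifest
#   hg debugrevlog somefile.txt  # statistics for that file's filelog
#   hg debugrevlog -d -c         # raw per-revision dump (columns as in the
#                                # header written above)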
1822 1825 @command('debugrevspec',
1823 1826 [('', 'optimize', None,
1824 1827 _('print parsed tree after optimizing (DEPRECATED)')),
1825 1828 ('p', 'show-stage', [],
1826 1829 _('print parsed tree at the given stage'), _('NAME')),
1827 1830 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1828 1831 ('', 'verify-optimized', False, _('verify optimized result')),
1829 1832 ],
1830 1833 _('REVSPEC'))
1831 1834 def debugrevspec(ui, repo, expr, **opts):
1832 1835 """parse and apply a revision specification
1833 1836
1834 1837 Use the -p/--show-stage option to print the parsed tree at the given stages.
1835 1838 Use -p all to print the tree at every stage.
1836 1839
1837 1840 Use --verify-optimized to compare the optimized result with the unoptimized
1838 1841 one. Returns 1 if the optimized result differs.
1839 1842 """
1840 1843 stages = [
1841 1844 ('parsed', lambda tree: tree),
1842 1845 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1843 1846 ('concatenated', revsetlang.foldconcat),
1844 1847 ('analyzed', revsetlang.analyze),
1845 1848 ('optimized', revsetlang.optimize),
1846 1849 ]
1847 1850 if opts['no_optimized']:
1848 1851 stages = stages[:-1]
1849 1852 if opts['verify_optimized'] and opts['no_optimized']:
1850 1853 raise error.Abort(_('cannot use --verify-optimized with '
1851 1854 '--no-optimized'))
1852 1855 stagenames = set(n for n, f in stages)
1853 1856
1854 1857 showalways = set()
1855 1858 showchanged = set()
1856 1859 if ui.verbose and not opts['show_stage']:
1857 1860 # show parsed tree by --verbose (deprecated)
1858 1861 showalways.add('parsed')
1859 1862 showchanged.update(['expanded', 'concatenated'])
1860 1863 if opts['optimize']:
1861 1864 showalways.add('optimized')
1862 1865 if opts['show_stage'] and opts['optimize']:
1863 1866 raise error.Abort(_('cannot use --optimize with --show-stage'))
1864 1867 if opts['show_stage'] == ['all']:
1865 1868 showalways.update(stagenames)
1866 1869 else:
1867 1870 for n in opts['show_stage']:
1868 1871 if n not in stagenames:
1869 1872 raise error.Abort(_('invalid stage name: %s') % n)
1870 1873 showalways.update(opts['show_stage'])
1871 1874
1872 1875 treebystage = {}
1873 1876 printedtree = None
1874 1877 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1875 1878 for n, f in stages:
1876 1879 treebystage[n] = tree = f(tree)
1877 1880 if n in showalways or (n in showchanged and tree != printedtree):
1878 1881 if opts['show_stage'] or n != 'parsed':
1879 1882 ui.write(("* %s:\n") % n)
1880 1883 ui.write(revsetlang.prettyformat(tree), "\n")
1881 1884 printedtree = tree
1882 1885
1883 1886 if opts['verify_optimized']:
1884 1887 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1885 1888 brevs = revset.makematcher(treebystage['optimized'])(repo)
1886 1889 if ui.verbose:
1887 1890 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1888 1891 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1889 1892 arevs = list(arevs)
1890 1893 brevs = list(brevs)
1891 1894 if arevs == brevs:
1892 1895 return 0
1893 1896 ui.write(('--- analyzed\n'), label='diff.file_a')
1894 1897 ui.write(('+++ optimized\n'), label='diff.file_b')
1895 1898 sm = difflib.SequenceMatcher(None, arevs, brevs)
1896 1899 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1897 1900 if tag in ('delete', 'replace'):
1898 1901 for c in arevs[alo:ahi]:
1899 1902 ui.write('-%s\n' % c, label='diff.deleted')
1900 1903 if tag in ('insert', 'replace'):
1901 1904 for c in brevs[blo:bhi]:
1902 1905 ui.write('+%s\n' % c, label='diff.inserted')
1903 1906 if tag == 'equal':
1904 1907 for c in arevs[alo:ahi]:
1905 1908 ui.write(' %s\n' % c)
1906 1909 return 1
1907 1910
1908 1911 func = revset.makematcher(tree)
1909 1912 revs = func(repo)
1910 1913 if ui.verbose:
1911 1914 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1912 1915 for c in revs:
1913 1916 ui.write("%s\n" % c)
1914 1917
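# Hedged usage sketch (the revset expressions are illustrative):
#
#   hg debugrevspec 'heads(all())'              # evaluate and print revisions
#   hg debugrevspec -p all 'a::b and file(x)'   # show every parsing stage
#   hg debugrevspec --verify-optimized 'expr'   # exit 1 if the optimized
#                                               # result differs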
1915 1918 @command('debugsetparents', [], _('REV1 [REV2]'))
1916 1919 def debugsetparents(ui, repo, rev1, rev2=None):
1917 1920 """manually set the parents of the current working directory
1918 1921
1919 1922 This is useful for writing repository conversion tools, but should
1920 1923 be used with care. For example, neither the working directory nor the
1921 1924 dirstate is updated, so file status may be incorrect after running this
1922 1925 command.
1923 1926
1924 1927 Returns 0 on success.
1925 1928 """
1926 1929
1927 1930 r1 = scmutil.revsingle(repo, rev1).node()
1928 1931 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1929 1932
1930 1933 with repo.wlock():
1931 1934 repo.setparents(r1, r2)
1932 1935
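# Hedged usage sketch (revision identifiers are hypothetical):
#
#   hg debugsetparents 1a2b3c4d            # single parent
#   hg debugsetparents 1a2b3c4d 5e6f7a8b   # two parents
#
# Only the dirstate parents change; file contents and status records are left
# untouched, as the docstring above warns.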
1933 1936 @command('debugsub',
1934 1937 [('r', 'rev', '',
1935 1938 _('revision to check'), _('REV'))],
1936 1939 _('[-r REV] [REV]'))
1937 1940 def debugsub(ui, repo, rev=None):
1938 1941 ctx = scmutil.revsingle(repo, rev, None)
1939 1942 for k, v in sorted(ctx.substate.items()):
1940 1943 ui.write(('path %s\n') % k)
1941 1944 ui.write((' source %s\n') % v[0])
1942 1945 ui.write((' revision %s\n') % v[1])
1943 1946
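# Sample output shape for debugsub (subrepo path, source URL and revision
# below are hypothetical), matching the three ui.write() calls above:
#
#   path mysub
#    source http://example.com/mysub
#    revision 0123456789abcdef0123456789abcdef01234567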
1944 1947 @command('debugsuccessorssets',
1945 1948 [],
1946 1949 _('[REV]'))
1947 1950 def debugsuccessorssets(ui, repo, *revs):
1948 1951 """show set of successors for revision
1949 1952
1950 1953 A successors set of changeset A is a consistent group of revisions that
1951 1954 succeed A. It contains non-obsolete changesets only.
1952 1955
1953 1956 In most cases a changeset A has a single successors set containing a single
1954 1957 successor (changeset A replaced by A').
1955 1958
1956 1959 A changeset that is made obsolete with no successors is called "pruned".
1957 1960 Such changesets have no successors sets at all.
1958 1961
1959 1962 A changeset that has been "split" will have a successors set containing
1960 1963 more than one successor.
1961 1964
1962 1965 A changeset that has been rewritten in multiple different ways is called
1963 1966 "divergent". Such changesets have multiple successor sets (each of which
1964 1967 may also be split, i.e. have multiple successors).
1965 1968
1966 1969 Results are displayed as follows::
1967 1970
1968 1971 <rev1>
1969 1972 <successors-1A>
1970 1973 <rev2>
1971 1974 <successors-2A>
1972 1975 <successors-2B1> <successors-2B2> <successors-2B3>
1973 1976
1974 1977 Here rev2 has two possible (i.e. divergent) successors sets. The first
1975 1978 holds one element, whereas the second holds three (i.e. the changeset has
1976 1979 been split).
1977 1980 """
1978 1981 # passed to successorssets caching computation from one call to another
1979 1982 cache = {}
1980 1983 ctx2str = str
1981 1984 node2str = short
1982 1985 if ui.debug():
1983 1986 def ctx2str(ctx):
1984 1987 return ctx.hex()
1985 1988 node2str = hex
1986 1989 for rev in scmutil.revrange(repo, revs):
1987 1990 ctx = repo[rev]
1988 1991 ui.write('%s\n'% ctx2str(ctx))
1989 1992 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
1990 1993 if succsset:
1991 1994 ui.write(' ')
1992 1995 ui.write(node2str(succsset[0]))
1993 1996 for node in succsset[1:]:
1994 1997 ui.write(' ')
1995 1998 ui.write(node2str(node))
1996 1999 ui.write('\n')
1997 2000
1998 2001 @command('debugtemplate',
1999 2002 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2000 2003 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2001 2004 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2002 2005 optionalrepo=True)
2003 2006 def debugtemplate(ui, repo, tmpl, **opts):
2004 2007 """parse and apply a template
2005 2008
2006 2009 If -r/--rev is given, the template is processed as a log template and
2007 2010 applied to the given changesets. Otherwise, it is processed as a generic
2008 2011 template.
2009 2012
2010 2013 Use --verbose to print the parsed tree.
2011 2014 """
2012 2015 revs = None
2013 2016 if opts['rev']:
2014 2017 if repo is None:
2015 2018 raise error.RepoError(_('there is no Mercurial repository here '
2016 2019 '(.hg not found)'))
2017 2020 revs = scmutil.revrange(repo, opts['rev'])
2018 2021
2019 2022 props = {}
2020 2023 for d in opts['define']:
2021 2024 try:
2022 2025 k, v = (e.strip() for e in d.split('=', 1))
2023 2026 if not k or k == 'ui':
2024 2027 raise ValueError
2025 2028 props[k] = v
2026 2029 except ValueError:
2027 2030 raise error.Abort(_('malformed keyword definition: %s') % d)
2028 2031
2029 2032 if ui.verbose:
2030 2033 aliases = ui.configitems('templatealias')
2031 2034 tree = templater.parse(tmpl)
2032 2035 ui.note(templater.prettyformat(tree), '\n')
2033 2036 newtree = templater.expandaliases(tree, aliases)
2034 2037 if newtree != tree:
2035 2038 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2036 2039
2037 2040 mapfile = None
2038 2041 if revs is None:
2039 2042 k = 'debugtemplate'
2040 2043 t = formatter.maketemplater(ui, k, tmpl)
2041 2044 ui.write(templater.stringify(t(k, ui=ui, **props)))
2042 2045 else:
2043 2046 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2044 2047 mapfile, buffered=False)
2045 2048 for r in revs:
2046 2049 displayer.show(repo[r], **props)
2047 2050 displayer.close()
2048 2051
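# Hedged usage sketch (template strings are illustrative):
#
#   hg debugtemplate 'literal text\n'                    # generic template
#   hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'
#   hg debugtemplate -D greeting=hello '{greeting}\n'    # user-defined keyword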
2049 2052 @command('debugupgraderepo', [
2050 2053 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2051 2054 ('', 'run', False, _('performs an upgrade')),
2052 2055 ])
2053 2056 def debugupgraderepo(ui, repo, run=False, optimize=None):
2054 2057 """upgrade a repository to use different features
2055 2058
2056 2059 If no arguments are specified, the repository is evaluated for upgrade
2057 2060 and a list of problems and potential optimizations is printed.
2058 2061
2059 2062 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2060 2063 can be influenced via additional arguments. More details will be provided
2061 2064 by the command output when run without ``--run``.
2062 2065
2063 2066 During the upgrade, the repository will be locked and no writes will be
2064 2067 allowed.
2065 2068
2066 2069 At the end of the upgrade, the repository may not be readable while new
2067 2070 repository data is swapped in. This window will be as long as it takes to
2068 2071 rename some directories inside the ``.hg`` directory. On most machines, this
2069 2072 should complete almost instantaneously and the chances of a consumer being
2070 2073 unable to access the repository should be low.
2071 2074 """
2072 2075 return repair.upgraderepo(ui, repo, run=run, optimize=optimize)
2073 2076
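# Hedged usage sketch:
#
#   hg debugupgraderepo          # dry run: report deficiencies and the
#                                # optimization names accepted by -o
#   hg debugupgraderepo --run    # actually perform the upgrade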
2074 2077 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2075 2078 inferrepo=True)
2076 2079 def debugwalk(ui, repo, *pats, **opts):
2077 2080 """show how files match on given patterns"""
2078 2081 m = scmutil.match(repo[None], pats, opts)
2079 2082 items = list(repo.walk(m))
2080 2083 if not items:
2081 2084 return
2082 2085 f = lambda fn: fn
2083 2086 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2084 2087 f = lambda fn: util.normpath(fn)
2085 2088 fmt = 'f %%-%ds %%-%ds %%s' % (
2086 2089 max([len(abs) for abs in items]),
2087 2090 max([len(m.rel(abs)) for abs in items]))
2088 2091 for abs in items:
2089 2092 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2090 2093 ui.write("%s\n" % line.rstrip())
2091 2094
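# Sample output shape for debugwalk (paths hypothetical): one line per match
# with the repo-relative path, the cwd-relative path, and 'exact' when the
# file was named literally on the command line, per the format string above.
#
#   f dir/a.txt  a.txt  exact
#   f dir/b.txt  b.txt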
2092 2095 @command('debugwireargs',
2093 2096 [('', 'three', '', 'three'),
2094 2097 ('', 'four', '', 'four'),
2095 2098 ('', 'five', '', 'five'),
2096 2099 ] + commands.remoteopts,
2097 2100 _('REPO [OPTIONS]... [ONE [TWO]]'),
2098 2101 norepo=True)
2099 2102 def debugwireargs(ui, repopath, *vals, **opts):
2100 2103 repo = hg.peer(ui, opts, repopath)
2101 2104 for opt in commands.remoteopts:
2102 2105 del opts[opt[1]]
2103 2106 args = {}
2104 2107 for k, v in opts.iteritems():
2105 2108 if v:
2106 2109 args[k] = v
2107 2110 # run twice to check that we don't mess up the stream for the next command
2108 2111 res1 = repo.debugwireargs(*vals, **args)
2109 2112 res2 = repo.debugwireargs(*vals, **args)
2110 2113 ui.write("%s\n" % res1)
2111 2114 if res1 != res2:
2112 2115 ui.warn("%s\n" % res2)