vfs: use 'vfs' module directly in 'mercurial.debugcommand'...
Pierre-Yves David
r31239:9cdba607 default
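The change itself is mechanical: the two call sites below that build a revlog over the current working directory (in debugancestor and debugdag) stop reaching the vfs class through scmutil and use the new standalone vfs module instead, imported as vfsmod so the module name does not collide with local variables. The following is a minimal sketch of the resulting pattern, not code from the commit; it assumes the module layout visible in the diff (mercurial/vfs.py exposing the vfs class), and the opener variable and 'example.i' index name are placeholders for illustration.

    from mercurial import pycompat, revlog
    from mercurial import vfs as vfsmod

    # Build an unaudited vfs rooted at the current directory, as the updated
    # call sites do, then hand it to revlog as its opener. 'example.i' is a
    # hypothetical index file name, not something referenced by the commit.
    opener = vfsmod.vfs(pycompat.getcwd(), audit=False)
    r = revlog.revlog(opener, 'example.i')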
@@ -1,2110 +1,2111 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 commands,
36 36 context,
37 37 dagparser,
38 38 dagutil,
39 39 encoding,
40 40 error,
41 41 exchange,
42 42 extensions,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 policy,
51 51 pvec,
52 52 pycompat,
53 53 repair,
54 54 revlog,
55 55 revset,
56 56 revsetlang,
57 57 scmutil,
58 58 setdiscovery,
59 59 simplemerge,
60 60 smartset,
61 61 sslutil,
62 62 streamclone,
63 63 templater,
64 64 treediscovery,
65 65 util,
66 vfs as vfsmod,
66 67 )
67 68
68 69 release = lockmod.release
69 70
70 71 # We reuse the command table from commands because it is easier than
71 72 # teaching dispatch about multiple tables.
72 73 command = cmdutil.command(commands.table)
73 74
74 75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 76 def debugancestor(ui, repo, *args):
76 77 """find the ancestor revision of two revisions in a given index"""
77 78 if len(args) == 3:
78 79 index, rev1, rev2 = args
79 r = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False), index)
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
80 81 lookup = r.lookup
81 82 elif len(args) == 2:
82 83 if not repo:
83 84 raise error.Abort(_('there is no Mercurial repository here '
84 85 '(.hg not found)'))
85 86 rev1, rev2 = args
86 87 r = repo.changelog
87 88 lookup = repo.lookup
88 89 else:
89 90 raise error.Abort(_('either two or three arguments required'))
90 91 a = r.ancestor(lookup(rev1), lookup(rev2))
91 92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92 93
93 94 @command('debugapplystreamclonebundle', [], 'FILE')
94 95 def debugapplystreamclonebundle(ui, repo, fname):
95 96 """apply a stream clone bundle file"""
96 97 f = hg.openpath(ui, fname)
97 98 gen = exchange.readbundle(ui, f, fname)
98 99 gen.apply(repo)
99 100
100 101 @command('debugbuilddag',
101 102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 104 ('n', 'new-file', None, _('add new file at each rev'))],
104 105 _('[OPTION]... [TEXT]'))
105 106 def debugbuilddag(ui, repo, text=None,
106 107 mergeable_file=False,
107 108 overwritten_file=False,
108 109 new_file=False):
109 110 """builds a repo with a given DAG from scratch in the current empty repo
110 111
111 112 The description of the DAG is read from stdin if not given on the
112 113 command line.
113 114
114 115 Elements:
115 116
116 117 - "+n" is a linear run of n nodes based on the current default parent
117 118 - "." is a single node based on the current default parent
118 119 - "$" resets the default parent to null (implied at the start);
119 120 otherwise the default parent is always the last node created
120 121 - "<p" sets the default parent to the backref p
121 122 - "*p" is a fork at parent p, which is a backref
122 123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 124 - "/p2" is a merge of the preceding node and p2
124 125 - ":tag" defines a local tag for the preceding node
125 126 - "@branch" sets the named branch for subsequent nodes
126 127 - "#...\\n" is a comment up to the end of the line
127 128
128 129 Whitespace between the above elements is ignored.
129 130
130 131 A backref is either
131 132
132 133 - a number n, which references the node curr-n, where curr is the current
133 134 node, or
134 135 - the name of a local tag you placed earlier using ":tag", or
135 136 - empty to denote the default parent.
136 137
137 138 All string valued-elements are either strictly alphanumeric, or must
138 139 be enclosed in double quotes ("..."), with "\\" as escape character.
139 140 """
140 141
141 142 if text is None:
142 143 ui.status(_("reading DAG from stdin\n"))
143 144 text = ui.fin.read()
144 145
145 146 cl = repo.changelog
146 147 if len(cl) > 0:
147 148 raise error.Abort(_('repository is not empty'))
148 149
149 150 # determine number of revs in DAG
150 151 total = 0
151 152 for type, data in dagparser.parsedag(text):
152 153 if type == 'n':
153 154 total += 1
154 155
155 156 if mergeable_file:
156 157 linesperrev = 2
157 158 # make a file with k lines per rev
158 159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 160 initialmergedlines.append("")
160 161
161 162 tags = []
162 163
163 164 wlock = lock = tr = None
164 165 try:
165 166 wlock = repo.wlock()
166 167 lock = repo.lock()
167 168 tr = repo.transaction("builddag")
168 169
169 170 at = -1
170 171 atbranch = 'default'
171 172 nodeids = []
172 173 id = 0
173 174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 175 for type, data in dagparser.parsedag(text):
175 176 if type == 'n':
176 177 ui.note(('node %s\n' % str(data)))
177 178 id, ps = data
178 179
179 180 files = []
180 181 fctxs = {}
181 182
182 183 p2 = None
183 184 if mergeable_file:
184 185 fn = "mf"
185 186 p1 = repo[ps[0]]
186 187 if len(ps) > 1:
187 188 p2 = repo[ps[1]]
188 189 pa = p1.ancestor(p2)
189 190 base, local, other = [x[fn].data() for x in (pa, p1,
190 191 p2)]
191 192 m3 = simplemerge.Merge3Text(base, local, other)
192 193 ml = [l.strip() for l in m3.merge_lines()]
193 194 ml.append("")
194 195 elif at > 0:
195 196 ml = p1[fn].data().split("\n")
196 197 else:
197 198 ml = initialmergedlines
198 199 ml[id * linesperrev] += " r%i" % id
199 200 mergedtext = "\n".join(ml)
200 201 files.append(fn)
201 202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202 203
203 204 if overwritten_file:
204 205 fn = "of"
205 206 files.append(fn)
206 207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207 208
208 209 if new_file:
209 210 fn = "nf%i" % id
210 211 files.append(fn)
211 212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 213 if len(ps) > 1:
213 214 if not p2:
214 215 p2 = repo[ps[1]]
215 216 for fn in p2:
216 217 if fn.startswith("nf"):
217 218 files.append(fn)
218 219 fctxs[fn] = p2[fn]
219 220
220 221 def fctxfn(repo, cx, path):
221 222 return fctxs.get(path)
222 223
223 224 if len(ps) == 0 or ps[0] < 0:
224 225 pars = [None, None]
225 226 elif len(ps) == 1:
226 227 pars = [nodeids[ps[0]], None]
227 228 else:
228 229 pars = [nodeids[p] for p in ps]
229 230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 231 date=(id, 0),
231 232 user="debugbuilddag",
232 233 extra={'branch': atbranch})
233 234 nodeid = repo.commitctx(cx)
234 235 nodeids.append(nodeid)
235 236 at = id
236 237 elif type == 'l':
237 238 id, name = data
238 239 ui.note(('tag %s\n' % name))
239 240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 241 elif type == 'a':
241 242 ui.note(('branch %s\n' % data))
242 243 atbranch = data
243 244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 245 tr.close()
245 246
246 247 if tags:
247 248 repo.vfs.write("localtags", "".join(tags))
248 249 finally:
249 250 ui.progress(_('building'), None)
250 251 release(tr, lock, wlock)
251 252
252 253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
253 254 indent_string = ' ' * indent
254 255 if all:
255 256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
256 257 % indent_string)
257 258
258 259 def showchunks(named):
259 260 ui.write("\n%s%s\n" % (indent_string, named))
260 261 chain = None
261 262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
262 263 node = chunkdata['node']
263 264 p1 = chunkdata['p1']
264 265 p2 = chunkdata['p2']
265 266 cs = chunkdata['cs']
266 267 deltabase = chunkdata['deltabase']
267 268 delta = chunkdata['delta']
268 269 ui.write("%s%s %s %s %s %s %s\n" %
269 270 (indent_string, hex(node), hex(p1), hex(p2),
270 271 hex(cs), hex(deltabase), len(delta)))
271 272 chain = node
272 273
273 274 chunkdata = gen.changelogheader()
274 275 showchunks("changelog")
275 276 chunkdata = gen.manifestheader()
276 277 showchunks("manifest")
277 278 for chunkdata in iter(gen.filelogheader, {}):
278 279 fname = chunkdata['filename']
279 280 showchunks(fname)
280 281 else:
281 282 if isinstance(gen, bundle2.unbundle20):
282 283 raise error.Abort(_('use debugbundle2 for this file'))
283 284 chunkdata = gen.changelogheader()
284 285 chain = None
285 286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
286 287 node = chunkdata['node']
287 288 ui.write("%s%s\n" % (indent_string, hex(node)))
288 289 chain = node
289 290
290 291 def _debugbundle2(ui, gen, all=None, **opts):
291 292 """lists the contents of a bundle2"""
292 293 if not isinstance(gen, bundle2.unbundle20):
293 294 raise error.Abort(_('not a bundle2 file'))
294 295 ui.write(('Stream params: %s\n' % repr(gen.params)))
295 296 for part in gen.iterparts():
296 297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
297 298 if part.type == 'changegroup':
298 299 version = part.params.get('version', '01')
299 300 cg = changegroup.getunbundler(version, part, 'UN')
300 301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
301 302
302 303 @command('debugbundle',
303 304 [('a', 'all', None, _('show all details')),
304 305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
305 306 _('FILE'),
306 307 norepo=True)
307 308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
308 309 """lists the contents of a bundle"""
309 310 with hg.openpath(ui, bundlepath) as f:
310 311 if spec:
311 312 spec = exchange.getbundlespec(ui, f)
312 313 ui.write('%s\n' % spec)
313 314 return
314 315
315 316 gen = exchange.readbundle(ui, f, bundlepath)
316 317 if isinstance(gen, bundle2.unbundle20):
317 318 return _debugbundle2(ui, gen, all=all, **opts)
318 319 _debugchangegroup(ui, gen, all=all, **opts)
319 320
320 321 @command('debugcheckstate', [], '')
321 322 def debugcheckstate(ui, repo):
322 323 """validate the correctness of the current dirstate"""
323 324 parent1, parent2 = repo.dirstate.parents()
324 325 m1 = repo[parent1].manifest()
325 326 m2 = repo[parent2].manifest()
326 327 errors = 0
327 328 for f in repo.dirstate:
328 329 state = repo.dirstate[f]
329 330 if state in "nr" and f not in m1:
330 331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
331 332 errors += 1
332 333 if state in "a" and f in m1:
333 334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
334 335 errors += 1
335 336 if state in "m" and f not in m1 and f not in m2:
336 337 ui.warn(_("%s in state %s, but not in either manifest\n") %
337 338 (f, state))
338 339 errors += 1
339 340 for f in m1:
340 341 state = repo.dirstate[f]
341 342 if state not in "nrm":
342 343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
343 344 errors += 1
344 345 if errors:
345 346 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
346 347 raise error.Abort(errstr)
347 348
348 349 @command('debugcolor',
349 350 [('', 'style', None, _('show all configured styles'))],
350 351 'hg debugcolor')
351 352 def debugcolor(ui, repo, **opts):
352 353 """show available color, effects or style"""
353 354 ui.write(('color mode: %s\n') % ui._colormode)
354 355 if opts.get('style'):
355 356 return _debugdisplaystyle(ui)
356 357 else:
357 358 return _debugdisplaycolor(ui)
358 359
359 360 def _debugdisplaycolor(ui):
360 361 ui = ui.copy()
361 362 ui._styles.clear()
362 363 for effect in color._effects.keys():
363 364 ui._styles[effect] = effect
364 365 if ui._terminfoparams:
365 366 for k, v in ui.configitems('color'):
366 367 if k.startswith('color.'):
367 368 ui._styles[k] = k[6:]
368 369 elif k.startswith('terminfo.'):
369 370 ui._styles[k] = k[9:]
370 371 ui.write(_('available colors:\n'))
371 372 # sort label with a '_' after the other to group '_background' entry.
372 373 items = sorted(ui._styles.items(),
373 374 key=lambda i: ('_' in i[0], i[0], i[1]))
374 375 for colorname, label in items:
375 376 ui.write(('%s\n') % colorname, label=label)
376 377
377 378 def _debugdisplaystyle(ui):
378 379 ui.write(_('available style:\n'))
379 380 width = max(len(s) for s in ui._styles)
380 381 for label, effects in sorted(ui._styles.items()):
381 382 ui.write('%s' % label, label=label)
382 383 if effects:
383 384 # 50
384 385 ui.write(': ')
385 386 ui.write(' ' * (max(0, width - len(label))))
386 387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
387 388 ui.write('\n')
388 389
389 390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
390 391 def debugcommands(ui, cmd='', *args):
391 392 """list all available commands and options"""
392 393 for cmd, vals in sorted(commands.table.iteritems()):
393 394 cmd = cmd.split('|')[0].strip('^')
394 395 opts = ', '.join([i[1] for i in vals[1]])
395 396 ui.write('%s: %s\n' % (cmd, opts))
396 397
397 398 @command('debugcomplete',
398 399 [('o', 'options', None, _('show the command options'))],
399 400 _('[-o] CMD'),
400 401 norepo=True)
401 402 def debugcomplete(ui, cmd='', **opts):
402 403 """returns the completion list associated with the given command"""
403 404
404 405 if opts.get('options'):
405 406 options = []
406 407 otables = [commands.globalopts]
407 408 if cmd:
408 409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
409 410 otables.append(entry[1])
410 411 for t in otables:
411 412 for o in t:
412 413 if "(DEPRECATED)" in o[3]:
413 414 continue
414 415 if o[0]:
415 416 options.append('-%s' % o[0])
416 417 options.append('--%s' % o[1])
417 418 ui.write("%s\n" % "\n".join(options))
418 419 return
419 420
420 421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
421 422 if ui.verbose:
422 423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
423 424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
424 425
425 426 @command('debugcreatestreamclonebundle', [], 'FILE')
426 427 def debugcreatestreamclonebundle(ui, repo, fname):
427 428 """create a stream clone bundle file
428 429
429 430 Stream bundles are special bundles that are essentially archives of
430 431 revlog files. They are commonly used for cloning very quickly.
431 432 """
432 433 requirements, gen = streamclone.generatebundlev1(repo)
433 434 changegroup.writechunks(ui, gen, fname)
434 435
435 436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436 437
437 438 @command('debugdag',
438 439 [('t', 'tags', None, _('use tags as labels')),
439 440 ('b', 'branches', None, _('annotate with branch names')),
440 441 ('', 'dots', None, _('use dots for runs')),
441 442 ('s', 'spaces', None, _('separate elements by spaces'))],
442 443 _('[OPTION]... [FILE [REV]...]'),
443 444 optionalrepo=True)
444 445 def debugdag(ui, repo, file_=None, *revs, **opts):
445 446 """format the changelog or an index DAG as a concise textual description
446 447
447 448 If you pass a revlog index, the revlog's DAG is emitted. If you list
448 449 revision numbers, they get labeled in the output as rN.
449 450
450 451 Otherwise, the changelog DAG of the current repo is emitted.
451 452 """
452 453 spaces = opts.get('spaces')
453 454 dots = opts.get('dots')
454 455 if file_:
455 rlog = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False),
456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
456 457 file_)
457 458 revs = set((int(r) for r in revs))
458 459 def events():
459 460 for r in rlog:
460 461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
461 462 if p != -1))
462 463 if r in revs:
463 464 yield 'l', (r, "r%i" % r)
464 465 elif repo:
465 466 cl = repo.changelog
466 467 tags = opts.get('tags')
467 468 branches = opts.get('branches')
468 469 if tags:
469 470 labels = {}
470 471 for l, n in repo.tags().items():
471 472 labels.setdefault(cl.rev(n), []).append(l)
472 473 def events():
473 474 b = "default"
474 475 for r in cl:
475 476 if branches:
476 477 newb = cl.read(cl.node(r))[5]['branch']
477 478 if newb != b:
478 479 yield 'a', newb
479 480 b = newb
480 481 yield 'n', (r, list(p for p in cl.parentrevs(r)
481 482 if p != -1))
482 483 if tags:
483 484 ls = labels.get(r)
484 485 if ls:
485 486 for l in ls:
486 487 yield 'l', (r, l)
487 488 else:
488 489 raise error.Abort(_('need repo for changelog dag'))
489 490
490 491 for line in dagparser.dagtextlines(events(),
491 492 addspaces=spaces,
492 493 wraplabels=True,
493 494 wrapannotations=True,
494 495 wrapnonlinear=dots,
495 496 usedots=dots,
496 497 maxlinewidth=70):
497 498 ui.write(line)
498 499 ui.write("\n")
499 500
500 501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
501 502 def debugdata(ui, repo, file_, rev=None, **opts):
502 503 """dump the contents of a data file revision"""
503 504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
504 505 if rev is not None:
505 506 raise error.CommandError('debugdata', _('invalid arguments'))
506 507 file_, rev = None, file_
507 508 elif rev is None:
508 509 raise error.CommandError('debugdata', _('invalid arguments'))
509 510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
510 511 try:
511 512 ui.write(r.revision(r.lookup(rev), raw=True))
512 513 except KeyError:
513 514 raise error.Abort(_('invalid revision identifier %s') % rev)
514 515
515 516 @command('debugdate',
516 517 [('e', 'extended', None, _('try extended date formats'))],
517 518 _('[-e] DATE [RANGE]'),
518 519 norepo=True, optionalrepo=True)
519 520 def debugdate(ui, date, range=None, **opts):
520 521 """parse and display a date"""
521 522 if opts["extended"]:
522 523 d = util.parsedate(date, util.extendeddateformats)
523 524 else:
524 525 d = util.parsedate(date)
525 526 ui.write(("internal: %s %s\n") % d)
526 527 ui.write(("standard: %s\n") % util.datestr(d))
527 528 if range:
528 529 m = util.matchdate(range)
529 530 ui.write(("match: %s\n") % m(d[0]))
530 531
531 532 @command('debugdeltachain',
532 533 commands.debugrevlogopts + commands.formatteropts,
533 534 _('-c|-m|FILE'),
534 535 optionalrepo=True)
535 536 def debugdeltachain(ui, repo, file_=None, **opts):
536 537 """dump information about delta chains in a revlog
537 538
538 539 Output can be templatized. Available template keywords are:
539 540
540 541 :``rev``: revision number
541 542 :``chainid``: delta chain identifier (numbered by unique base)
542 543 :``chainlen``: delta chain length to this revision
543 544 :``prevrev``: previous revision in delta chain
544 545 :``deltatype``: role of delta / how it was computed
545 546 :``compsize``: compressed size of revision
546 547 :``uncompsize``: uncompressed size of revision
547 548 :``chainsize``: total size of compressed revisions in chain
548 549 :``chainratio``: total chain size divided by uncompressed revision size
549 550 (new delta chains typically start at ratio 2.00)
550 551 :``lindist``: linear distance from base revision in delta chain to end
551 552 of this revision
552 553 :``extradist``: total size of revisions not part of this delta chain from
553 554 base of delta chain to end of this revision; a measurement
554 555 of how much extra data we need to read/seek across to read
555 556 the delta chain for this revision
556 557 :``extraratio``: extradist divided by chainsize; another representation of
557 558 how much unrelated data is needed to load this delta chain
558 559 """
559 560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
560 561 index = r.index
561 562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
562 563
563 564 def revinfo(rev):
564 565 e = index[rev]
565 566 compsize = e[1]
566 567 uncompsize = e[2]
567 568 chainsize = 0
568 569
569 570 if generaldelta:
570 571 if e[3] == e[5]:
571 572 deltatype = 'p1'
572 573 elif e[3] == e[6]:
573 574 deltatype = 'p2'
574 575 elif e[3] == rev - 1:
575 576 deltatype = 'prev'
576 577 elif e[3] == rev:
577 578 deltatype = 'base'
578 579 else:
579 580 deltatype = 'other'
580 581 else:
581 582 if e[3] == rev:
582 583 deltatype = 'base'
583 584 else:
584 585 deltatype = 'prev'
585 586
586 587 chain = r._deltachain(rev)[0]
587 588 for iterrev in chain:
588 589 e = index[iterrev]
589 590 chainsize += e[1]
590 591
591 592 return compsize, uncompsize, deltatype, chain, chainsize
592 593
593 594 fm = ui.formatter('debugdeltachain', opts)
594 595
595 596 fm.plain(' rev chain# chainlen prev delta '
596 597 'size rawsize chainsize ratio lindist extradist '
597 598 'extraratio\n')
598 599
599 600 chainbases = {}
600 601 for rev in r:
601 602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
602 603 chainbase = chain[0]
603 604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
604 605 basestart = r.start(chainbase)
605 606 revstart = r.start(rev)
606 607 lineardist = revstart + comp - basestart
607 608 extradist = lineardist - chainsize
608 609 try:
609 610 prevrev = chain[-2]
610 611 except IndexError:
611 612 prevrev = -1
612 613
613 614 chainratio = float(chainsize) / float(uncomp)
614 615 extraratio = float(extradist) / float(chainsize)
615 616
616 617 fm.startitem()
617 618 fm.write('rev chainid chainlen prevrev deltatype compsize '
618 619 'uncompsize chainsize chainratio lindist extradist '
619 620 'extraratio',
620 621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
621 622 rev, chainid, len(chain), prevrev, deltatype, comp,
622 623 uncomp, chainsize, chainratio, lineardist, extradist,
623 624 extraratio,
624 625 rev=rev, chainid=chainid, chainlen=len(chain),
625 626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
626 627 uncompsize=uncomp, chainsize=chainsize,
627 628 chainratio=chainratio, lindist=lineardist,
628 629 extradist=extradist, extraratio=extraratio)
629 630
630 631 fm.end()
631 632
632 633 @command('debugdirstate|debugstate',
633 634 [('', 'nodates', None, _('do not display the saved mtime')),
634 635 ('', 'datesort', None, _('sort by saved mtime'))],
635 636 _('[OPTION]...'))
636 637 def debugstate(ui, repo, **opts):
637 638 """show the contents of the current dirstate"""
638 639
639 640 nodates = opts.get('nodates')
640 641 datesort = opts.get('datesort')
641 642
642 643 timestr = ""
643 644 if datesort:
644 645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
645 646 else:
646 647 keyfunc = None # sort by filename
647 648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
648 649 if ent[3] == -1:
649 650 timestr = 'unset '
650 651 elif nodates:
651 652 timestr = 'set '
652 653 else:
653 654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
654 655 time.localtime(ent[3]))
655 656 if ent[1] & 0o20000:
656 657 mode = 'lnk'
657 658 else:
658 659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
659 660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
660 661 for f in repo.dirstate.copies():
661 662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662 663
663 664 @command('debugdiscovery',
664 665 [('', 'old', None, _('use old-style discovery')),
665 666 ('', 'nonheads', None,
666 667 _('use old-style discovery with non-heads included')),
667 668 ] + commands.remoteopts,
668 669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 671 """runs the changeset discovery protocol in isolation"""
671 672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 673 opts.get('branch'))
673 674 remote = hg.peer(repo, opts, remoteurl)
674 675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675 676
676 677 # make sure tests are repeatable
677 678 random.seed(12323)
678 679
679 680 def doit(localheads, remoteheads, remote=remote):
680 681 if opts.get('old'):
681 682 if localheads:
682 683 raise error.Abort('cannot use localheads with old style '
683 684 'discovery')
684 685 if not util.safehasattr(remote, 'branches'):
685 686 # enable in-client legacy support
686 687 remote = localrepo.locallegacypeer(remote.local())
687 688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 689 force=True)
689 690 common = set(common)
690 691 if not opts.get('nonheads'):
691 692 ui.write(("unpruned common: %s\n") %
692 693 " ".join(sorted(short(n) for n in common)))
693 694 dag = dagutil.revlogdag(repo.changelog)
694 695 all = dag.ancestorset(dag.internalizeall(common))
695 696 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 697 else:
697 698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 699 common = set(common)
699 700 rheads = set(hds)
700 701 lheads = set(repo.heads())
701 702 ui.write(("common heads: %s\n") %
702 703 " ".join(sorted(short(n) for n in common)))
703 704 if lheads <= common:
704 705 ui.write(("local is subset\n"))
705 706 elif rheads <= common:
706 707 ui.write(("remote is subset\n"))
707 708
708 709 serverlogs = opts.get('serverlog')
709 710 if serverlogs:
710 711 for filename in serverlogs:
711 712 with open(filename, 'r') as logfile:
712 713 line = logfile.readline()
713 714 while line:
714 715 parts = line.strip().split(';')
715 716 op = parts[1]
716 717 if op == 'cg':
717 718 pass
718 719 elif op == 'cgss':
719 720 doit(parts[2].split(' '), parts[3].split(' '))
720 721 elif op == 'unb':
721 722 doit(parts[3].split(' '), parts[2].split(' '))
722 723 line = logfile.readline()
723 724 else:
724 725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 726 opts.get('remote_head'))
726 727 localrevs = opts.get('local_head')
727 728 doit(localrevs, remoterevs)
728 729
729 730 @command('debugextensions', commands.formatteropts, [], norepo=True)
730 731 def debugextensions(ui, **opts):
731 732 '''show information about active extensions'''
732 733 exts = extensions.extensions(ui)
733 734 hgver = util.version()
734 735 fm = ui.formatter('debugextensions', opts)
735 736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 737 isinternal = extensions.ismoduleinternal(extmod)
737 738 extsource = pycompat.fsencode(extmod.__file__)
738 739 if isinternal:
739 740 exttestedwith = [] # never expose magic string to users
740 741 else:
741 742 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 743 extbuglink = getattr(extmod, 'buglink', None)
743 744
744 745 fm.startitem()
745 746
746 747 if ui.quiet or ui.verbose:
747 748 fm.write('name', '%s\n', extname)
748 749 else:
749 750 fm.write('name', '%s', extname)
750 751 if isinternal or hgver in exttestedwith:
751 752 fm.plain('\n')
752 753 elif not exttestedwith:
753 754 fm.plain(_(' (untested!)\n'))
754 755 else:
755 756 lasttestedversion = exttestedwith[-1]
756 757 fm.plain(' (%s!)\n' % lasttestedversion)
757 758
758 759 fm.condwrite(ui.verbose and extsource, 'source',
759 760 _(' location: %s\n'), extsource or "")
760 761
761 762 if ui.verbose:
762 763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 764 fm.data(bundled=isinternal)
764 765
765 766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 767 _(' tested with: %s\n'),
767 768 fm.formatlist(exttestedwith, name='ver'))
768 769
769 770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 771 _(' bug reporting: %s\n'), extbuglink or "")
771 772
772 773 fm.end()
773 774
774 775 @command('debugfileset',
775 776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 777 _('[-r REV] FILESPEC'))
777 778 def debugfileset(ui, repo, expr, **opts):
778 779 '''parse and apply a fileset specification'''
779 780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 781 if ui.verbose:
781 782 tree = fileset.parse(expr)
782 783 ui.note(fileset.prettyformat(tree), "\n")
783 784
784 785 for f in ctx.getfileset(expr):
785 786 ui.write("%s\n" % f)
786 787
787 788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 789 def debugfsinfo(ui, path="."):
789 790 """show information detected about current filesystem"""
790 791 util.writefile('.debugfsinfo', '')
791 792 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
792 793 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
793 794 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
794 795 ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
795 796 and 'yes' or 'no'))
796 797 os.unlink('.debugfsinfo')
797 798
798 799 @command('debuggetbundle',
799 800 [('H', 'head', [], _('id of head node'), _('ID')),
800 801 ('C', 'common', [], _('id of common node'), _('ID')),
801 802 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
802 803 _('REPO FILE [-H|-C ID]...'),
803 804 norepo=True)
804 805 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
805 806 """retrieves a bundle from a repo
806 807
807 808 Every ID must be a full-length hex node id string. Saves the bundle to the
808 809 given file.
809 810 """
810 811 repo = hg.peer(ui, opts, repopath)
811 812 if not repo.capable('getbundle'):
812 813 raise error.Abort("getbundle() not supported by target repository")
813 814 args = {}
814 815 if common:
815 816 args['common'] = [bin(s) for s in common]
816 817 if head:
817 818 args['heads'] = [bin(s) for s in head]
818 819 # TODO: get desired bundlecaps from command line.
819 820 args['bundlecaps'] = None
820 821 bundle = repo.getbundle('debug', **args)
821 822
822 823 bundletype = opts.get('type', 'bzip2').lower()
823 824 btypes = {'none': 'HG10UN',
824 825 'bzip2': 'HG10BZ',
825 826 'gzip': 'HG10GZ',
826 827 'bundle2': 'HG20'}
827 828 bundletype = btypes.get(bundletype)
828 829 if bundletype not in bundle2.bundletypes:
829 830 raise error.Abort(_('unknown bundle type specified with --type'))
830 831 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
831 832
832 833 @command('debugignore', [], '[FILE]')
833 834 def debugignore(ui, repo, *files, **opts):
834 835 """display the combined ignore pattern and information about ignored files
835 836
836 837 With no argument display the combined ignore pattern.
837 838
838 839 Given space separated file names, shows if the given file is ignored and
839 840 if so, show the ignore rule (file and line number) that matched it.
840 841 """
841 842 ignore = repo.dirstate._ignore
842 843 if not files:
843 844 # Show all the patterns
844 845 includepat = getattr(ignore, 'includepat', None)
845 846 if includepat is not None:
846 847 ui.write("%s\n" % includepat)
847 848 else:
848 849 raise error.Abort(_("no ignore patterns found"))
849 850 else:
850 851 for f in files:
851 852 nf = util.normpath(f)
852 853 ignored = None
853 854 ignoredata = None
854 855 if nf != '.':
855 856 if ignore(nf):
856 857 ignored = nf
857 858 ignoredata = repo.dirstate._ignorefileandline(nf)
858 859 else:
859 860 for p in util.finddirs(nf):
860 861 if ignore(p):
861 862 ignored = p
862 863 ignoredata = repo.dirstate._ignorefileandline(p)
863 864 break
864 865 if ignored:
865 866 if ignored == nf:
866 867 ui.write(_("%s is ignored\n") % f)
867 868 else:
868 869 ui.write(_("%s is ignored because of "
869 870 "containing folder %s\n")
870 871 % (f, ignored))
871 872 ignorefile, lineno, line = ignoredata
872 873 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
873 874 % (ignorefile, lineno, line))
874 875 else:
875 876 ui.write(_("%s is not ignored\n") % f)
876 877
877 878 @command('debugindex', commands.debugrevlogopts +
878 879 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
879 880 _('[-f FORMAT] -c|-m|FILE'),
880 881 optionalrepo=True)
881 882 def debugindex(ui, repo, file_=None, **opts):
882 883 """dump the contents of an index file"""
883 884 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
884 885 format = opts.get('format', 0)
885 886 if format not in (0, 1):
886 887 raise error.Abort(_("unknown format %d") % format)
887 888
888 889 generaldelta = r.version & revlog.REVLOGGENERALDELTA
889 890 if generaldelta:
890 891 basehdr = ' delta'
891 892 else:
892 893 basehdr = ' base'
893 894
894 895 if ui.debugflag:
895 896 shortfn = hex
896 897 else:
897 898 shortfn = short
898 899
899 900 # There might not be anything in r, so have a sane default
900 901 idlen = 12
901 902 for i in r:
902 903 idlen = len(shortfn(r.node(i)))
903 904 break
904 905
905 906 if format == 0:
906 907 ui.write((" rev offset length " + basehdr + " linkrev"
907 908 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
908 909 elif format == 1:
909 910 ui.write((" rev flag offset length"
910 911 " size " + basehdr + " link p1 p2"
911 912 " %s\n") % "nodeid".rjust(idlen))
912 913
913 914 for i in r:
914 915 node = r.node(i)
915 916 if generaldelta:
916 917 base = r.deltaparent(i)
917 918 else:
918 919 base = r.chainbase(i)
919 920 if format == 0:
920 921 try:
921 922 pp = r.parents(node)
922 923 except Exception:
923 924 pp = [nullid, nullid]
924 925 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
925 926 i, r.start(i), r.length(i), base, r.linkrev(i),
926 927 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
927 928 elif format == 1:
928 929 pr = r.parentrevs(i)
929 930 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
930 931 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
931 932 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
932 933
933 934 @command('debugindexdot', commands.debugrevlogopts,
934 935 _('-c|-m|FILE'), optionalrepo=True)
935 936 def debugindexdot(ui, repo, file_=None, **opts):
936 937 """dump an index DAG as a graphviz dot file"""
937 938 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
938 939 ui.write(("digraph G {\n"))
939 940 for i in r:
940 941 node = r.node(i)
941 942 pp = r.parents(node)
942 943 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
943 944 if pp[1] != nullid:
944 945 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
945 946 ui.write("}\n")
946 947
947 948 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
948 949 def debuginstall(ui, **opts):
949 950 '''test Mercurial installation
950 951
951 952 Returns 0 on success.
952 953 '''
953 954
954 955 def writetemp(contents):
955 956 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
956 957 f = os.fdopen(fd, pycompat.sysstr("wb"))
957 958 f.write(contents)
958 959 f.close()
959 960 return name
960 961
961 962 problems = 0
962 963
963 964 fm = ui.formatter('debuginstall', opts)
964 965 fm.startitem()
965 966
966 967 # encoding
967 968 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
968 969 err = None
969 970 try:
970 971 encoding.fromlocal("test")
971 972 except error.Abort as inst:
972 973 err = inst
973 974 problems += 1
974 975 fm.condwrite(err, 'encodingerror', _(" %s\n"
975 976 " (check that your locale is properly set)\n"), err)
976 977
977 978 # Python
978 979 fm.write('pythonexe', _("checking Python executable (%s)\n"),
979 980 pycompat.sysexecutable)
980 981 fm.write('pythonver', _("checking Python version (%s)\n"),
981 982 ("%d.%d.%d" % sys.version_info[:3]))
982 983 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
983 984 os.path.dirname(pycompat.fsencode(os.__file__)))
984 985
985 986 security = set(sslutil.supportedprotocols)
986 987 if sslutil.hassni:
987 988 security.add('sni')
988 989
989 990 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
990 991 fm.formatlist(sorted(security), name='protocol',
991 992 fmt='%s', sep=','))
992 993
993 994 # These are warnings, not errors. So don't increment problem count. This
994 995 # may change in the future.
995 996 if 'tls1.2' not in security:
996 997 fm.plain(_(' TLS 1.2 not supported by Python install; '
997 998 'network connections lack modern security\n'))
998 999 if 'sni' not in security:
999 1000 fm.plain(_(' SNI not supported by Python install; may have '
1000 1001 'connectivity issues with some servers\n'))
1001 1002
1002 1003 # TODO print CA cert info
1003 1004
1004 1005 # hg version
1005 1006 hgver = util.version()
1006 1007 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1007 1008 hgver.split('+')[0])
1008 1009 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1009 1010 '+'.join(hgver.split('+')[1:]))
1010 1011
1011 1012 # compiled modules
1012 1013 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1013 1014 policy.policy)
1014 1015 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1015 1016 os.path.dirname(pycompat.fsencode(__file__)))
1016 1017
1017 1018 err = None
1018 1019 try:
1019 1020 from . import (
1020 1021 base85,
1021 1022 bdiff,
1022 1023 mpatch,
1023 1024 osutil,
1024 1025 )
1025 1026 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1026 1027 except Exception as inst:
1027 1028 err = inst
1028 1029 problems += 1
1029 1030 fm.condwrite(err, 'extensionserror', " %s\n", err)
1030 1031
1031 1032 compengines = util.compengines._engines.values()
1032 1033 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1033 1034 fm.formatlist(sorted(e.name() for e in compengines),
1034 1035 name='compengine', fmt='%s', sep=', '))
1035 1036 fm.write('compenginesavail', _('checking available compression engines '
1036 1037 '(%s)\n'),
1037 1038 fm.formatlist(sorted(e.name() for e in compengines
1038 1039 if e.available()),
1039 1040 name='compengine', fmt='%s', sep=', '))
1040 1041 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1041 1042 fm.write('compenginesserver', _('checking available compression engines '
1042 1043 'for wire protocol (%s)\n'),
1043 1044 fm.formatlist([e.name() for e in wirecompengines
1044 1045 if e.wireprotosupport()],
1045 1046 name='compengine', fmt='%s', sep=', '))
1046 1047
1047 1048 # templates
1048 1049 p = templater.templatepaths()
1049 1050 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1050 1051 fm.condwrite(not p, '', _(" no template directories found\n"))
1051 1052 if p:
1052 1053 m = templater.templatepath("map-cmdline.default")
1053 1054 if m:
1054 1055 # template found, check if it is working
1055 1056 err = None
1056 1057 try:
1057 1058 templater.templater.frommapfile(m)
1058 1059 except Exception as inst:
1059 1060 err = inst
1060 1061 p = None
1061 1062 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1062 1063 else:
1063 1064 p = None
1064 1065 fm.condwrite(p, 'defaulttemplate',
1065 1066 _("checking default template (%s)\n"), m)
1066 1067 fm.condwrite(not m, 'defaulttemplatenotfound',
1067 1068 _(" template '%s' not found\n"), "default")
1068 1069 if not p:
1069 1070 problems += 1
1070 1071 fm.condwrite(not p, '',
1071 1072 _(" (templates seem to have been installed incorrectly)\n"))
1072 1073
1073 1074 # editor
1074 1075 editor = ui.geteditor()
1075 1076 editor = util.expandpath(editor)
1076 1077 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1077 1078 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1078 1079 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1079 1080 _(" No commit editor set and can't find %s in PATH\n"
1080 1081 " (specify a commit editor in your configuration"
1081 1082 " file)\n"), not cmdpath and editor == 'vi' and editor)
1082 1083 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1083 1084 _(" Can't find editor '%s' in PATH\n"
1084 1085 " (specify a commit editor in your configuration"
1085 1086 " file)\n"), not cmdpath and editor)
1086 1087 if not cmdpath and editor != 'vi':
1087 1088 problems += 1
1088 1089
1089 1090 # check username
1090 1091 username = None
1091 1092 err = None
1092 1093 try:
1093 1094 username = ui.username()
1094 1095 except error.Abort as e:
1095 1096 err = e
1096 1097 problems += 1
1097 1098
1098 1099 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1099 1100 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1100 1101 " (specify a username in your configuration file)\n"), err)
1101 1102
1102 1103 fm.condwrite(not problems, '',
1103 1104 _("no problems detected\n"))
1104 1105 if not problems:
1105 1106 fm.data(problems=problems)
1106 1107 fm.condwrite(problems, 'problems',
1107 1108 _("%d problems detected,"
1108 1109 " please check your install!\n"), problems)
1109 1110 fm.end()
1110 1111
1111 1112 return problems
1112 1113
1113 1114 @command('debugknown', [], _('REPO ID...'), norepo=True)
1114 1115 def debugknown(ui, repopath, *ids, **opts):
1115 1116 """test whether node ids are known to a repo
1116 1117
1117 1118 Every ID must be a full-length hex node id string. Returns a list of 0s
1118 1119 and 1s indicating unknown/known.
1119 1120 """
1120 1121 repo = hg.peer(ui, opts, repopath)
1121 1122 if not repo.capable('known'):
1122 1123 raise error.Abort("known() not supported by target repository")
1123 1124 flags = repo.known([bin(s) for s in ids])
1124 1125 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1125 1126
1126 1127 @command('debuglabelcomplete', [], _('LABEL...'))
1127 1128 def debuglabelcomplete(ui, repo, *args):
1128 1129 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1129 1130 commands.debugnamecomplete(ui, repo, *args)
1130 1131
1131 1132 @command('debuglocks',
1132 1133 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 1134 ('W', 'force-wlock', None,
1134 1135 _('free the working state lock (DANGEROUS)'))],
1135 1136 _('[OPTION]...'))
1136 1137 def debuglocks(ui, repo, **opts):
1137 1138 """show or modify state of locks
1138 1139
1139 1140 By default, this command will show which locks are held. This
1140 1141 includes the user and process holding the lock, the amount of time
1141 1142 the lock has been held, and the machine name where the process is
1142 1143 running if it's not local.
1143 1144
1144 1145 Locks protect the integrity of Mercurial's data, so should be
1145 1146 treated with care. System crashes or other interruptions may cause
1146 1147 locks to not be properly released, though Mercurial will usually
1147 1148 detect and remove such stale locks automatically.
1148 1149
1149 1150 However, detecting stale locks may not always be possible (for
1150 1151 instance, on a shared filesystem). Removing locks may also be
1151 1152 blocked by filesystem permissions.
1152 1153
1153 1154 Returns 0 if no locks are held.
1154 1155
1155 1156 """
1156 1157
1157 1158 if opts.get('force_lock'):
1158 1159 repo.svfs.unlink('lock')
1159 1160 if opts.get('force_wlock'):
1160 1161 repo.vfs.unlink('wlock')
1161 1162 if opts.get('force_lock') or opts.get('force_wlock'):
1162 1163 return 0
1163 1164
1164 1165 now = time.time()
1165 1166 held = 0
1166 1167
1167 1168 def report(vfs, name, method):
1168 1169 # this causes stale locks to get reaped for more accurate reporting
1169 1170 try:
1170 1171 l = method(False)
1171 1172 except error.LockHeld:
1172 1173 l = None
1173 1174
1174 1175 if l:
1175 1176 l.release()
1176 1177 else:
1177 1178 try:
1178 1179 stat = vfs.lstat(name)
1179 1180 age = now - stat.st_mtime
1180 1181 user = util.username(stat.st_uid)
1181 1182 locker = vfs.readlock(name)
1182 1183 if ":" in locker:
1183 1184 host, pid = locker.split(':')
1184 1185 if host == socket.gethostname():
1185 1186 locker = 'user %s, process %s' % (user, pid)
1186 1187 else:
1187 1188 locker = 'user %s, process %s, host %s' \
1188 1189 % (user, pid, host)
1189 1190 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 1191 return 1
1191 1192 except OSError as e:
1192 1193 if e.errno != errno.ENOENT:
1193 1194 raise
1194 1195
1195 1196 ui.write(("%-6s free\n") % (name + ":"))
1196 1197 return 0
1197 1198
1198 1199 held += report(repo.svfs, "lock", repo.lock)
1199 1200 held += report(repo.vfs, "wlock", repo.wlock)
1200 1201
1201 1202 return held
1202 1203
1203 1204 @command('debugmergestate', [], '')
1204 1205 def debugmergestate(ui, repo, *args):
1205 1206 """print merge state
1206 1207
1207 1208 Use --verbose to print out information about whether v1 or v2 merge state
1208 1209 was chosen."""
1209 1210 def _hashornull(h):
1210 1211 if h == nullhex:
1211 1212 return 'null'
1212 1213 else:
1213 1214 return h
1214 1215
1215 1216 def printrecords(version):
1216 1217 ui.write(('* version %s records\n') % version)
1217 1218 if version == 1:
1218 1219 records = v1records
1219 1220 else:
1220 1221 records = v2records
1221 1222
1222 1223 for rtype, record in records:
1223 1224 # pretty print some record types
1224 1225 if rtype == 'L':
1225 1226 ui.write(('local: %s\n') % record)
1226 1227 elif rtype == 'O':
1227 1228 ui.write(('other: %s\n') % record)
1228 1229 elif rtype == 'm':
1229 1230 driver, mdstate = record.split('\0', 1)
1230 1231 ui.write(('merge driver: %s (state "%s")\n')
1231 1232 % (driver, mdstate))
1232 1233 elif rtype in 'FDC':
1233 1234 r = record.split('\0')
1234 1235 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 1236 if version == 1:
1236 1237 onode = 'not stored in v1 format'
1237 1238 flags = r[7]
1238 1239 else:
1239 1240 onode, flags = r[7:9]
1240 1241 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 1242 % (f, rtype, state, _hashornull(hash)))
1242 1243 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 1244 ui.write((' ancestor path: %s (node %s)\n')
1244 1245 % (afile, _hashornull(anode)))
1245 1246 ui.write((' other path: %s (node %s)\n')
1246 1247 % (ofile, _hashornull(onode)))
1247 1248 elif rtype == 'f':
1248 1249 filename, rawextras = record.split('\0', 1)
1249 1250 extras = rawextras.split('\0')
1250 1251 i = 0
1251 1252 extrastrings = []
1252 1253 while i < len(extras):
1253 1254 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 1255 i += 2
1255 1256
1256 1257 ui.write(('file extras: %s (%s)\n')
1257 1258 % (filename, ', '.join(extrastrings)))
1258 1259 elif rtype == 'l':
1259 1260 labels = record.split('\0', 2)
1260 1261 labels = [l for l in labels if len(l) > 0]
1261 1262 ui.write(('labels:\n'))
1262 1263 ui.write((' local: %s\n' % labels[0]))
1263 1264 ui.write((' other: %s\n' % labels[1]))
1264 1265 if len(labels) > 2:
1265 1266 ui.write((' base: %s\n' % labels[2]))
1266 1267 else:
1267 1268 ui.write(('unrecognized entry: %s\t%s\n')
1268 1269 % (rtype, record.replace('\0', '\t')))
1269 1270
1270 1271 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 1272 # merge state records. We shouldn't be doing this, but this is OK since this
1272 1273 # command is pretty low-level.
1273 1274 ms = mergemod.mergestate(repo)
1274 1275
1275 1276 # sort so that reasonable information is on top
1276 1277 v1records = ms._readrecordsv1()
1277 1278 v2records = ms._readrecordsv2()
1278 1279 order = 'LOml'
1279 1280 def key(r):
1280 1281 idx = order.find(r[0])
1281 1282 if idx == -1:
1282 1283 return (1, r[1])
1283 1284 else:
1284 1285 return (0, idx)
1285 1286 v1records.sort(key=key)
1286 1287 v2records.sort(key=key)
1287 1288
1288 1289 if not v1records and not v2records:
1289 1290 ui.write(('no merge state found\n'))
1290 1291 elif not v2records:
1291 1292 ui.note(('no version 2 merge state\n'))
1292 1293 printrecords(1)
1293 1294 elif ms._v1v2match(v1records, v2records):
1294 1295 ui.note(('v1 and v2 states match: using v2\n'))
1295 1296 printrecords(2)
1296 1297 else:
1297 1298 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 1299 printrecords(1)
1299 1300 if ui.verbose:
1300 1301 printrecords(2)
1301 1302
1302 1303 @command('debugnamecomplete', [], _('NAME...'))
1303 1304 def debugnamecomplete(ui, repo, *args):
1304 1305 '''complete "names" - tags, open branch names, bookmark names'''
1305 1306
1306 1307 names = set()
1307 1308 # since we previously only listed open branches, we will handle that
1308 1309 # specially (after this for loop)
1309 1310 for name, ns in repo.names.iteritems():
1310 1311 if name != 'branches':
1311 1312 names.update(ns.listnames(repo))
1312 1313 names.update(tag for (tag, heads, tip, closed)
1313 1314 in repo.branchmap().iterbranches() if not closed)
1314 1315 completions = set()
1315 1316 if not args:
1316 1317 args = ['']
1317 1318 for a in args:
1318 1319 completions.update(n for n in names if n.startswith(a))
1319 1320 ui.write('\n'.join(sorted(completions)))
1320 1321 ui.write('\n')
1321 1322
1322 1323 @command('debugobsolete',
1323 1324 [('', 'flags', 0, _('markers flag')),
1324 1325 ('', 'record-parents', False,
1325 1326 _('record parent information for the precursor')),
1326 1327 ('r', 'rev', [], _('display markers relevant to REV')),
1327 1328 ('', 'index', False, _('display index of the marker')),
1328 1329 ('', 'delete', [], _('delete markers specified by indices')),
1329 1330 ] + commands.commitopts2 + commands.formatteropts,
1330 1331 _('[OBSOLETED [REPLACEMENT ...]]'))
1331 1332 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1332 1333 """create arbitrary obsolete marker
1333 1334
1334 1335 With no arguments, displays the list of obsolescence markers."""
1335 1336
1336 1337 def parsenodeid(s):
1337 1338 try:
1338 1339 # We do not use revsingle/revrange functions here to accept
1339 1340 # arbitrary node identifiers, possibly not present in the
1340 1341 # local repository.
1341 1342 n = bin(s)
1342 1343 if len(n) != len(nullid):
1343 1344 raise TypeError()
1344 1345 return n
1345 1346 except TypeError:
1346 1347 raise error.Abort('changeset references must be full hexadecimal '
1347 1348 'node identifiers')
1348 1349
1349 1350 if opts.get('delete'):
1350 1351 indices = []
1351 1352 for v in opts.get('delete'):
1352 1353 try:
1353 1354 indices.append(int(v))
1354 1355 except ValueError:
1355 1356 raise error.Abort(_('invalid index value: %r') % v,
1356 1357 hint=_('use integers for indices'))
1357 1358
1358 1359 if repo.currenttransaction():
1359 1360 raise error.Abort(_('cannot delete obsmarkers in the middle '
1360 1361 'of transaction.'))
1361 1362
1362 1363 with repo.lock():
1363 1364 n = repair.deleteobsmarkers(repo.obsstore, indices)
1364 1365 ui.write(_('deleted %i obsolescence markers\n') % n)
1365 1366
1366 1367 return
1367 1368
1368 1369 if precursor is not None:
1369 1370 if opts['rev']:
1370 1371 raise error.Abort('cannot select revision when creating marker')
1371 1372 metadata = {}
1372 1373 metadata['user'] = opts['user'] or ui.username()
1373 1374 succs = tuple(parsenodeid(succ) for succ in successors)
1374 1375 l = repo.lock()
1375 1376 try:
1376 1377 tr = repo.transaction('debugobsolete')
1377 1378 try:
1378 1379 date = opts.get('date')
1379 1380 if date:
1380 1381 date = util.parsedate(date)
1381 1382 else:
1382 1383 date = None
1383 1384 prec = parsenodeid(precursor)
1384 1385 parents = None
1385 1386 if opts['record_parents']:
1386 1387 if prec not in repo.unfiltered():
1387 1388 raise error.Abort('cannot used --record-parents on '
1388 1389 'unknown changesets')
1389 1390 parents = repo.unfiltered()[prec].parents()
1390 1391 parents = tuple(p.node() for p in parents)
1391 1392 repo.obsstore.create(tr, prec, succs, opts['flags'],
1392 1393 parents=parents, date=date,
1393 1394 metadata=metadata)
1394 1395 tr.close()
1395 1396 except ValueError as exc:
1396 1397 raise error.Abort(_('bad obsmarker input: %s') % exc)
1397 1398 finally:
1398 1399 tr.release()
1399 1400 finally:
1400 1401 l.release()
1401 1402 else:
1402 1403 if opts['rev']:
1403 1404 revs = scmutil.revrange(repo, opts['rev'])
1404 1405 nodes = [repo[r].node() for r in revs]
1405 1406 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1406 1407 markers.sort(key=lambda x: x._data)
1407 1408 else:
1408 1409 markers = obsolete.getmarkers(repo)
1409 1410
1410 1411 markerstoiter = markers
1411 1412 isrelevant = lambda m: True
1412 1413 if opts.get('rev') and opts.get('index'):
1413 1414 markerstoiter = obsolete.getmarkers(repo)
1414 1415 markerset = set(markers)
1415 1416 isrelevant = lambda m: m in markerset
1416 1417
1417 1418 fm = ui.formatter('debugobsolete', opts)
1418 1419 for i, m in enumerate(markerstoiter):
1419 1420 if not isrelevant(m):
1420 1421 # marker can be irrelevant when we're iterating over a set
1421 1422 # of markers (markerstoiter) which is bigger than the set
1422 1423 # of markers we want to display (markers)
1423 1424 # this can happen if both --index and --rev options are
1424 1425 # provided and thus we need to iterate over all of the markers
1425 1426 # to get the correct indices, but only display the ones that
1426 1427 # are relevant to --rev value
1427 1428 continue
1428 1429 fm.startitem()
1429 1430 ind = i if opts.get('index') else None
1430 1431 cmdutil.showmarker(fm, m, index=ind)
1431 1432 fm.end()
1432 1433
1433 1434 @command('debugpathcomplete',
1434 1435 [('f', 'full', None, _('complete an entire path')),
1435 1436 ('n', 'normal', None, _('show only normal files')),
1436 1437 ('a', 'added', None, _('show only added files')),
1437 1438 ('r', 'removed', None, _('show only removed files'))],
1438 1439 _('FILESPEC...'))
1439 1440 def debugpathcomplete(ui, repo, *specs, **opts):
1440 1441 '''complete part or all of a tracked path
1441 1442
1442 1443 This command supports shells that offer path name completion. It
1443 1444 currently completes only files already known to the dirstate.
1444 1445
1445 1446 Completion extends only to the next path segment unless
1446 1447 --full is specified, in which case entire paths are used.'''
1447 1448
1448 1449 def complete(path, acceptable):
1449 1450 dirstate = repo.dirstate
1450 1451 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1451 1452 rootdir = repo.root + pycompat.ossep
1452 1453 if spec != repo.root and not spec.startswith(rootdir):
1453 1454 return [], []
1454 1455 if os.path.isdir(spec):
1455 1456 spec += '/'
1456 1457 spec = spec[len(rootdir):]
1457 1458 fixpaths = pycompat.ossep != '/'
1458 1459 if fixpaths:
1459 1460 spec = spec.replace(pycompat.ossep, '/')
1460 1461 speclen = len(spec)
1461 1462 fullpaths = opts['full']
1462 1463 files, dirs = set(), set()
1463 1464 adddir, addfile = dirs.add, files.add
1464 1465 for f, st in dirstate.iteritems():
1465 1466 if f.startswith(spec) and st[0] in acceptable:
1466 1467 if fixpaths:
1467 1468 f = f.replace('/', pycompat.ossep)
1468 1469 if fullpaths:
1469 1470 addfile(f)
1470 1471 continue
1471 1472 s = f.find(pycompat.ossep, speclen)
1472 1473 if s >= 0:
1473 1474 adddir(f[:s])
1474 1475 else:
1475 1476 addfile(f)
1476 1477 return files, dirs
1477 1478
1478 1479 acceptable = ''
1479 1480 if opts['normal']:
1480 1481 acceptable += 'nm'
1481 1482 if opts['added']:
1482 1483 acceptable += 'a'
1483 1484 if opts['removed']:
1484 1485 acceptable += 'r'
1485 1486 cwd = repo.getcwd()
1486 1487 if not specs:
1487 1488 specs = ['.']
1488 1489
1489 1490 files, dirs = set(), set()
1490 1491 for spec in specs:
1491 1492 f, d = complete(spec, acceptable or 'nmar')
1492 1493 files.update(f)
1493 1494 dirs.update(d)
1494 1495 files.update(dirs)
1495 1496 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1496 1497 ui.write('\n')
1497 1498
1498 1499 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1499 1500 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1500 1501 '''access the pushkey key/value protocol
1501 1502
1502 1503 With two args, list the keys in the given namespace.
1503 1504
1504 1505 With five args, set a key to new if it currently is set to old.
1505 1506 Reports success or failure.
1506 1507 '''
1507 1508
1508 1509 target = hg.peer(ui, {}, repopath)
1509 1510 if keyinfo:
1510 1511 key, old, new = keyinfo
1511 1512 r = target.pushkey(namespace, key, old, new)
1512 1513 ui.status(str(r) + '\n')
1513 1514 return not r
1514 1515 else:
1515 1516 for k, v in sorted(target.listkeys(namespace).iteritems()):
1516 1517 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1517 1518 v.encode('string-escape')))
1518 1519
1519 1520 @command('debugpvec', [], _('A B'))
1520 1521 def debugpvec(ui, repo, a, b=None):
1521 1522 ca = scmutil.revsingle(repo, a)
1522 1523 cb = scmutil.revsingle(repo, b)
1523 1524 pa = pvec.ctxpvec(ca)
1524 1525 pb = pvec.ctxpvec(cb)
1525 1526 if pa == pb:
1526 1527 rel = "="
1527 1528 elif pa > pb:
1528 1529 rel = ">"
1529 1530 elif pa < pb:
1530 1531 rel = "<"
1531 1532 elif pa | pb:
1532 1533 rel = "|"
1533 1534 ui.write(_("a: %s\n") % pa)
1534 1535 ui.write(_("b: %s\n") % pb)
1535 1536 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1536 1537 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1537 1538 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1538 1539 pa.distance(pb), rel))
1539 1540
1540 1541 @command('debugrebuilddirstate|debugrebuildstate',
1541 1542 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1542 1543 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1543 1544 'the working copy parent')),
1544 1545 ],
1545 1546 _('[-r REV]'))
1546 1547 def debugrebuilddirstate(ui, repo, rev, **opts):
1547 1548 """rebuild the dirstate as it would look like for the given revision
1548 1549
1549 1550 If no revision is specified, the working directory's first parent will be used.
1550 1551
1551 1552 The dirstate will be set to the files of the given revision.
1552 1553 The actual working directory content or existing dirstate
1553 1554 information such as adds or removes is not considered.
1554 1555
1555 1556 ``minimal`` will only rebuild the dirstate status for files that claim to be
1556 1557 tracked but are not in the parent manifest, or that exist in the parent
1557 1558 manifest but are not in the dirstate. It will not change adds, removes, or
1558 1559 modified files that are in the working copy parent.
1559 1560
1560 1561 One use of this command is to make the next :hg:`status` invocation
1561 1562 check the actual file content.
1562 1563 """
1563 1564 ctx = scmutil.revsingle(repo, rev)
1564 1565 with repo.wlock():
1565 1566 dirstate = repo.dirstate
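# Leaving changedfiles as None makes dirstate.rebuild() below reset the entry
# for every file in the target manifest; with --minimal it is narrowed down
# to just the inconsistent entries.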
1566 1567 changedfiles = None
1567 1568 # See command doc for what minimal does.
1568 1569 if opts.get('minimal'):
1569 1570 manifestfiles = set(ctx.manifest().keys())
1570 1571 dirstatefiles = set(dirstate)
1571 1572 manifestonly = manifestfiles - dirstatefiles
1572 1573 dsonly = dirstatefiles - manifestfiles
1573 1574 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1574 1575 changedfiles = manifestonly | dsnotadded
1575 1576
1576 1577 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1577 1578
1578 1579 @command('debugrebuildfncache', [], '')
1579 1580 def debugrebuildfncache(ui, repo):
1580 1581 """rebuild the fncache file"""
1581 1582 repair.rebuildfncache(ui, repo)
1582 1583
1583 1584 @command('debugrename',
1584 1585 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1585 1586 _('[-r REV] FILE'))
1586 1587 def debugrename(ui, repo, file1, *pats, **opts):
1587 1588 """dump rename information"""
1588 1589
1589 1590 ctx = scmutil.revsingle(repo, opts.get('rev'))
1590 1591 m = scmutil.match(ctx, (file1,) + pats, opts)
1591 1592 for abs in ctx.walk(m):
1592 1593 fctx = ctx[abs]
1593 1594 o = fctx.filelog().renamed(fctx.filenode())
1594 1595 rel = m.rel(abs)
1595 1596 if o:
1596 1597 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1597 1598 else:
1598 1599 ui.write(_("%s not renamed\n") % rel)
1599 1600
1600 1601 @command('debugrevlog', commands.debugrevlogopts +
1601 1602 [('d', 'dump', False, _('dump index data'))],
1602 1603 _('-c|-m|FILE'),
1603 1604 optionalrepo=True)
1604 1605 def debugrevlog(ui, repo, file_=None, **opts):
1605 1606 """show data and statistics about a revlog"""
1606 1607 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1607 1608
1608 1609 if opts.get("dump"):
1609 1610 numrevs = len(r)
1610 1611 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1611 1612 " rawsize totalsize compression heads chainlen\n"))
1612 1613 ts = 0
1613 1614 heads = set()
1614 1615
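# Running totals for the dump: 'ts' accumulates raw (uncompressed) sizes and
# 'heads' is maintained incrementally by discarding each revision's parents
# and adding the revision itself.  The 'compression' column is the cumulative
# raw size divided by the compressed bytes stored so far (r.end(rev) is the
# end offset of this revision's data in the revlog).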
1615 1616 for rev in xrange(numrevs):
1616 1617 dbase = r.deltaparent(rev)
1617 1618 if dbase == -1:
1618 1619 dbase = rev
1619 1620 cbase = r.chainbase(rev)
1620 1621 clen = r.chainlen(rev)
1621 1622 p1, p2 = r.parentrevs(rev)
1622 1623 rs = r.rawsize(rev)
1623 1624 ts = ts + rs
1624 1625 heads -= set(r.parentrevs(rev))
1625 1626 heads.add(rev)
1626 1627 try:
1627 1628 compression = ts / r.end(rev)
1628 1629 except ZeroDivisionError:
1629 1630 compression = 0
1630 1631 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1631 1632 "%11d %5d %8d\n" %
1632 1633 (rev, p1, p2, r.start(rev), r.end(rev),
1633 1634 r.start(dbase), r.start(cbase),
1634 1635 r.start(p1), r.start(p2),
1635 1636 rs, ts, compression, len(heads), clen))
1636 1637 return 0
1637 1638
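# Decode the version word: the low 16 bits hold the revlog format number, the
# remaining bits are feature flags such as inline data and generaldelta.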
1638 1639 v = r.version
1639 1640 format = v & 0xFFFF
1640 1641 flags = []
1641 1642 gdelta = False
1642 1643 if v & revlog.REVLOGNGINLINEDATA:
1643 1644 flags.append('inline')
1644 1645 if v & revlog.REVLOGGENERALDELTA:
1645 1646 gdelta = True
1646 1647 flags.append('generaldelta')
1647 1648 if not flags:
1648 1649 flags = ['(none)']
1649 1650
1650 1651 nummerges = 0
1651 1652 numfull = 0
1652 1653 numprev = 0
1653 1654 nump1 = 0
1654 1655 nump2 = 0
1655 1656 numother = 0
1656 1657 nump1prev = 0
1657 1658 nump2prev = 0
1658 1659 chainlengths = []
1659 1660
1660 1661 datasize = [None, 0, 0]
1661 1662 fullsize = [None, 0, 0]
1662 1663 deltasize = [None, 0, 0]
1663 1664 chunktypecounts = {}
1664 1665 chunktypesizes = {}
1665 1666
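# datasize, fullsize and deltasize are [min, max, total] accumulators;
# addsize() folds one revision's size into such a triple and the totals are
# converted to averages after the main loop.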
1666 1667 def addsize(size, l):
1667 1668 if l[0] is None or size < l[0]:
1668 1669 l[0] = size
1669 1670 if size > l[1]:
1670 1671 l[1] = size
1671 1672 l[2] += size
1672 1673
1673 1674 numrevs = len(r)
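# Walk every revision once, recording its delta-chain length and classifying
# it: a full snapshot (delta parent is nullrev) or a delta against the
# previous revision, p1, p2 or some other revision.  Deltas against the
# previous revision are further broken down by whether that revision is also
# p1 or p2.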
1674 1675 for rev in xrange(numrevs):
1675 1676 p1, p2 = r.parentrevs(rev)
1676 1677 delta = r.deltaparent(rev)
1677 1678 if format > 0:
1678 1679 addsize(r.rawsize(rev), datasize)
1679 1680 if p2 != nullrev:
1680 1681 nummerges += 1
1681 1682 size = r.length(rev)
1682 1683 if delta == nullrev:
1683 1684 chainlengths.append(0)
1684 1685 numfull += 1
1685 1686 addsize(size, fullsize)
1686 1687 else:
1687 1688 chainlengths.append(chainlengths[delta] + 1)
1688 1689 addsize(size, deltasize)
1689 1690 if delta == rev - 1:
1690 1691 numprev += 1
1691 1692 if delta == p1:
1692 1693 nump1prev += 1
1693 1694 elif delta == p2:
1694 1695 nump2prev += 1
1695 1696 elif delta == p1:
1696 1697 nump1 += 1
1697 1698 elif delta == p2:
1698 1699 nump2 += 1
1699 1700 elif delta != nullrev:
1700 1701 numother += 1
1701 1702
1702 1703 # Obtain data on the raw chunks in the revlog.
1703 1704 chunk = r._chunkraw(rev, rev)[1]
1704 1705 if chunk:
1705 1706 chunktype = chunk[0]
1706 1707 else:
1707 1708 chunktype = 'empty'
1708 1709
1709 1710 if chunktype not in chunktypecounts:
1710 1711 chunktypecounts[chunktype] = 0
1711 1712 chunktypesizes[chunktype] = 0
1712 1713
1713 1714 chunktypecounts[chunktype] += 1
1714 1715 chunktypesizes[chunktype] += size
1715 1716
1716 1717 # Adjust size min value for empty cases
1717 1718 for size in (datasize, fullsize, deltasize):
1718 1719 if size[0] is None:
1719 1720 size[0] = 0
1720 1721
1721 1722 numdeltas = numrevs - numfull
1722 1723 numoprev = numprev - nump1prev - nump2prev
1723 1724 totalrawsize = datasize[2]
1724 1725 datasize[2] /= numrevs
1725 1726 fulltotal = fullsize[2]
1726 1727 fullsize[2] /= numfull
1727 1728 deltatotal = deltasize[2]
1728 1729 if numrevs - numfull > 0:
1729 1730 deltasize[2] /= numrevs - numfull
1730 1731 totalsize = fulltotal + deltatotal
1731 1732 avgchainlen = sum(chainlengths) / numrevs
1732 1733 maxchainlen = max(chainlengths)
1733 1734 compratio = 1
1734 1735 if totalsize:
1735 1736 compratio = totalrawsize / totalsize
1736 1737
1737 1738 basedfmtstr = '%%%dd\n'
1738 1739 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1739 1740
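# Formatting helpers for the report: dfmtstr()/pcfmtstr() build column-aligned
# format strings sized to the widest expected value, and pcfmt() pairs a value
# with its percentage of a total.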
1740 1741 def dfmtstr(max):
1741 1742 return basedfmtstr % len(str(max))
1742 1743 def pcfmtstr(max, padding=0):
1743 1744 return basepcfmtstr % (len(str(max)), ' ' * padding)
1744 1745
1745 1746 def pcfmt(value, total):
1746 1747 if total:
1747 1748 return (value, 100 * float(value) / total)
1748 1749 else:
1749 1750 return value, 100.0
1750 1751
1751 1752 ui.write(('format : %d\n') % format)
1752 1753 ui.write(('flags : %s\n') % ', '.join(flags))
1753 1754
1754 1755 ui.write('\n')
1755 1756 fmt = pcfmtstr(totalsize)
1756 1757 fmt2 = dfmtstr(totalsize)
1757 1758 ui.write(('revisions : ') + fmt2 % numrevs)
1758 1759 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1759 1760 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1760 1761 ui.write(('revisions : ') + fmt2 % numrevs)
1761 1762 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1762 1763 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1763 1764 ui.write(('revision size : ') + fmt2 % totalsize)
1764 1765 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1765 1766 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1766 1767
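# The chunk type is the first byte of the raw chunk as stored in the revlog;
# in practice this is typically 'u' for uncompressed chunks and 'x' for
# zlib-compressed ones (the first byte of a zlib stream).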
1767 1768 def fmtchunktype(chunktype):
1768 1769 if chunktype == 'empty':
1769 1770 return ' %s : ' % chunktype
1770 1771 elif chunktype in string.ascii_letters:
1771 1772 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1772 1773 else:
1773 1774 return ' 0x%s : ' % hex(chunktype)
1774 1775
1775 1776 ui.write('\n')
1776 1777 ui.write(('chunks : ') + fmt2 % numrevs)
1777 1778 for chunktype in sorted(chunktypecounts):
1778 1779 ui.write(fmtchunktype(chunktype))
1779 1780 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1780 1781 ui.write(('chunks size : ') + fmt2 % totalsize)
1781 1782 for chunktype in sorted(chunktypecounts):
1782 1783 ui.write(fmtchunktype(chunktype))
1783 1784 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1784 1785
1785 1786 ui.write('\n')
1786 1787 fmt = dfmtstr(max(avgchainlen, compratio))
1787 1788 ui.write(('avg chain length : ') + fmt % avgchainlen)
1788 1789 ui.write(('max chain length : ') + fmt % maxchainlen)
1789 1790 ui.write(('compression ratio : ') + fmt % compratio)
1790 1791
1791 1792 if format > 0:
1792 1793 ui.write('\n')
1793 1794 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1794 1795 % tuple(datasize))
1795 1796 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1796 1797 % tuple(fullsize))
1797 1798 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1798 1799 % tuple(deltasize))
1799 1800
1800 1801 if numdeltas > 0:
1801 1802 ui.write('\n')
1802 1803 fmt = pcfmtstr(numdeltas)
1803 1804 fmt2 = pcfmtstr(numdeltas, 4)
1804 1805 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1805 1806 if numprev > 0:
1806 1807 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1807 1808 numprev))
1808 1809 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1809 1810 numprev))
1810 1811 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1811 1812 numprev))
1812 1813 if gdelta:
1813 1814 ui.write(('deltas against p1 : ')
1814 1815 + fmt % pcfmt(nump1, numdeltas))
1815 1816 ui.write(('deltas against p2 : ')
1816 1817 + fmt % pcfmt(nump2, numdeltas))
1817 1818 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1818 1819 numdeltas))
1819 1820
1820 1821 @command('debugrevspec',
1821 1822 [('', 'optimize', None,
1822 1823 _('print parsed tree after optimizing (DEPRECATED)')),
1823 1824 ('p', 'show-stage', [],
1824 1825 _('print parsed tree at the given stage'), _('NAME')),
1825 1826 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1826 1827 ('', 'verify-optimized', False, _('verify optimized result')),
1827 1828 ],
1828 1829 ('REVSPEC'))
1829 1830 def debugrevspec(ui, repo, expr, **opts):
1830 1831 """parse and apply a revision specification
1831 1832
1832 1833 Use the -p/--show-stage option to print the parsed tree at the given stages.
1833 1834 Use -p all to print the tree at every stage.
1834 1835
1835 1836 Use --verify-optimized to compare the optimized result with the unoptimized
1836 1837 one. Returns 1 if the optimized result differs.
1837 1838 """
1838 1839 stages = [
1839 1840 ('parsed', lambda tree: tree),
1840 1841 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1841 1842 ('concatenated', revsetlang.foldconcat),
1842 1843 ('analyzed', revsetlang.analyze),
1843 1844 ('optimized', revsetlang.optimize),
1844 1845 ]
1845 1846 if opts['no_optimized']:
1846 1847 stages = stages[:-1]
1847 1848 if opts['verify_optimized'] and opts['no_optimized']:
1848 1849 raise error.Abort(_('cannot use --verify-optimized with '
1849 1850 '--no-optimized'))
1850 1851 stagenames = set(n for n, f in stages)
1851 1852
1852 1853 showalways = set()
1853 1854 showchanged = set()
1854 1855 if ui.verbose and not opts['show_stage']:
1855 1856 # show parsed tree by --verbose (deprecated)
1856 1857 showalways.add('parsed')
1857 1858 showchanged.update(['expanded', 'concatenated'])
1858 1859 if opts['optimize']:
1859 1860 showalways.add('optimized')
1860 1861 if opts['show_stage'] and opts['optimize']:
1861 1862 raise error.Abort(_('cannot use --optimize with --show-stage'))
1862 1863 if opts['show_stage'] == ['all']:
1863 1864 showalways.update(stagenames)
1864 1865 else:
1865 1866 for n in opts['show_stage']:
1866 1867 if n not in stagenames:
1867 1868 raise error.Abort(_('invalid stage name: %s') % n)
1868 1869 showalways.update(opts['show_stage'])
1869 1870
1870 1871 treebystage = {}
1871 1872 printedtree = None
1872 1873 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1873 1874 for n, f in stages:
1874 1875 treebystage[n] = tree = f(tree)
1875 1876 if n in showalways or (n in showchanged and tree != printedtree):
1876 1877 if opts['show_stage'] or n != 'parsed':
1877 1878 ui.write(("* %s:\n") % n)
1878 1879 ui.write(revsetlang.prettyformat(tree), "\n")
1879 1880 printedtree = tree
1880 1881
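# Compute the revisions selected by the analyzed and the optimized trees and,
# if they differ, print the difference in a unified-diff-like form and
# return 1.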
1881 1882 if opts['verify_optimized']:
1882 1883 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1883 1884 brevs = revset.makematcher(treebystage['optimized'])(repo)
1884 1885 if ui.verbose:
1885 1886 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1886 1887 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1887 1888 arevs = list(arevs)
1888 1889 brevs = list(brevs)
1889 1890 if arevs == brevs:
1890 1891 return 0
1891 1892 ui.write(('--- analyzed\n'), label='diff.file_a')
1892 1893 ui.write(('+++ optimized\n'), label='diff.file_b')
1893 1894 sm = difflib.SequenceMatcher(None, arevs, brevs)
1894 1895 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1895 1896 if tag in ('delete', 'replace'):
1896 1897 for c in arevs[alo:ahi]:
1897 1898 ui.write('-%s\n' % c, label='diff.deleted')
1898 1899 if tag in ('insert', 'replace'):
1899 1900 for c in brevs[blo:bhi]:
1900 1901 ui.write('+%s\n' % c, label='diff.inserted')
1901 1902 if tag == 'equal':
1902 1903 for c in arevs[alo:ahi]:
1903 1904 ui.write(' %s\n' % c)
1904 1905 return 1
1905 1906
1906 1907 func = revset.makematcher(tree)
1907 1908 revs = func(repo)
1908 1909 if ui.verbose:
1909 1910 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1910 1911 for c in revs:
1911 1912 ui.write("%s\n" % c)
1912 1913
1913 1914 @command('debugsetparents', [], _('REV1 [REV2]'))
1914 1915 def debugsetparents(ui, repo, rev1, rev2=None):
1915 1916 """manually set the parents of the current working directory
1916 1917
1917 1918 This is useful for writing repository conversion tools, but should
1918 1919 be used with care. For example, neither the working directory nor the
1919 1920 dirstate is updated, so file status may be incorrect after running this
1920 1921 command.
1921 1922
1922 1923 Returns 0 on success.
1923 1924 """
1924 1925
1925 1926 r1 = scmutil.revsingle(repo, rev1).node()
1926 1927 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1927 1928
1928 1929 with repo.wlock():
1929 1930 repo.setparents(r1, r2)
1930 1931
1931 1932 @command('debugsub',
1932 1933 [('r', 'rev', '',
1933 1934 _('revision to check'), _('REV'))],
1934 1935 _('[-r REV] [REV]'))
1935 1936 def debugsub(ui, repo, rev=None):
1936 1937 ctx = scmutil.revsingle(repo, rev, None)
1937 1938 for k, v in sorted(ctx.substate.items()):
1938 1939 ui.write(('path %s\n') % k)
1939 1940 ui.write((' source %s\n') % v[0])
1940 1941 ui.write((' revision %s\n') % v[1])
1941 1942
1942 1943 @command('debugsuccessorssets',
1943 1944 [],
1944 1945 _('[REV]'))
1945 1946 def debugsuccessorssets(ui, repo, *revs):
1946 1947 """show set of successors for revision
1947 1948
1948 1949 A successors set of changeset A is a consistent group of revisions that
1949 1950 succeed A. It contains non-obsolete changesets only.
1950 1951
1951 1952 In most cases a changeset A has a single successors set containing a single
1952 1953 successor (changeset A replaced by A').
1953 1954
1954 1955 A changeset that is made obsolete with no successors is called "pruned".
1955 1956 Such changesets have no successors sets at all.
1956 1957
1957 1958 A changeset that has been "split" will have a successors set containing
1958 1959 more than one successor.
1959 1960
1960 1961 A changeset that has been rewritten in multiple different ways is called
1961 1962 "divergent". Such changesets have multiple successor sets (each of which
1962 1963 may also be split, i.e. have multiple successors).
1963 1964
1964 1965 Results are displayed as follows::
1965 1966
1966 1967 <rev1>
1967 1968 <successors-1A>
1968 1969 <rev2>
1969 1970 <successors-2A>
1970 1971 <successors-2B1> <successors-2B2> <successors-2B3>
1971 1972
1972 1973 Here rev2 has two possible (i.e. divergent) successors sets. The first
1973 1974 holds one element, whereas the second holds three (i.e. the changeset has
1974 1975 been split).
1975 1976 """
1976 1977 # passed to successorssets caching computation from one call to another
1977 1978 cache = {}
1978 1979 ctx2str = str
1979 1980 node2str = short
1980 1981 if ui.debug():
1981 1982 def ctx2str(ctx):
1982 1983 return ctx.hex()
1983 1984 node2str = hex
1984 1985 for rev in scmutil.revrange(repo, revs):
1985 1986 ctx = repo[rev]
1986 1987 ui.write('%s\n' % ctx2str(ctx))
1987 1988 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
1988 1989 if succsset:
1989 1990 ui.write(' ')
1990 1991 ui.write(node2str(succsset[0]))
1991 1992 for node in succsset[1:]:
1992 1993 ui.write(' ')
1993 1994 ui.write(node2str(node))
1994 1995 ui.write('\n')
1995 1996
1996 1997 @command('debugtemplate',
1997 1998 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
1998 1999 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
1999 2000 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2000 2001 optionalrepo=True)
2001 2002 def debugtemplate(ui, repo, tmpl, **opts):
2002 2003 """parse and apply a template
2003 2004
2004 2005 If -r/--rev is given, the template is processed as a log template and
2005 2006 applied to the given changesets. Otherwise, it is processed as a generic
2006 2007 template.
2007 2008
2008 2009 Use --verbose to print the parsed tree.
2009 2010 """
2010 2011 revs = None
2011 2012 if opts['rev']:
2012 2013 if repo is None:
2013 2014 raise error.RepoError(_('there is no Mercurial repository here '
2014 2015 '(.hg not found)'))
2015 2016 revs = scmutil.revrange(repo, opts['rev'])
2016 2017
2017 2018 props = {}
2018 2019 for d in opts['define']:
2019 2020 try:
2020 2021 k, v = (e.strip() for e in d.split('=', 1))
2021 2022 if not k:
2022 2023 raise ValueError
2023 2024 props[k] = v
2024 2025 except ValueError:
2025 2026 raise error.Abort(_('malformed keyword definition: %s') % d)
2026 2027
2027 2028 if ui.verbose:
2028 2029 aliases = ui.configitems('templatealias')
2029 2030 tree = templater.parse(tmpl)
2030 2031 ui.note(templater.prettyformat(tree), '\n')
2031 2032 newtree = templater.expandaliases(tree, aliases)
2032 2033 if newtree != tree:
2033 2034 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2034 2035
2035 2036 mapfile = None
2036 2037 if revs is None:
2037 2038 k = 'debugtemplate'
2038 2039 t = formatter.maketemplater(ui, k, tmpl)
2039 2040 ui.write(templater.stringify(t(k, **props)))
2040 2041 else:
2041 2042 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2042 2043 mapfile, buffered=False)
2043 2044 for r in revs:
2044 2045 displayer.show(repo[r], **props)
2045 2046 displayer.close()
2046 2047
2047 2048 @command('debugupgraderepo', [
2048 2049 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2049 2050 ('', 'run', False, _('performs an upgrade')),
2050 2051 ])
2051 2052 def debugupgraderepo(ui, repo, run=False, optimize=None):
2052 2053 """upgrade a repository to use different features
2053 2054
2054 2055 If no arguments are specified, the repository is evaluated for upgrade
2055 2056 and a list of problems and potential optimizations is printed.
2056 2057
2057 2058 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2058 2059 can be influenced via additional arguments. More details will be provided
2059 2060 by the command output when run without ``--run``.
2060 2061
2061 2062 During the upgrade, the repository will be locked and no writes will be
2062 2063 allowed.
2063 2064
2064 2065 At the end of the upgrade, the repository may not be readable while new
2065 2066 repository data is swapped in. This window will be as long as it takes to
2066 2067 rename some directories inside the ``.hg`` directory. On most machines, this
2067 2068 should complete almost instantaneously and the chances of a consumer being
2068 2069 unable to access the repository should be low.
2069 2070 """
2070 2071 return repair.upgraderepo(ui, repo, run=run, optimize=optimize)
2071 2072
2072 2073 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2073 2074 inferrepo=True)
2074 2075 def debugwalk(ui, repo, *pats, **opts):
2075 2076 """show how files match on given patterns"""
2076 2077 m = scmutil.match(repo[None], pats, opts)
2077 2078 items = list(repo.walk(m))
2078 2079 if not items:
2079 2080 return
2080 2081 f = lambda fn: fn
2081 2082 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2082 2083 f = lambda fn: util.normpath(fn)
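# Size the first two columns to the longest repository-relative and
# cwd-relative path so the 'exact' marker lines up in the last column.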
2083 2084 fmt = 'f %%-%ds %%-%ds %%s' % (
2084 2085 max([len(abs) for abs in items]),
2085 2086 max([len(m.rel(abs)) for abs in items]))
2086 2087 for abs in items:
2087 2088 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2088 2089 ui.write("%s\n" % line.rstrip())
2089 2090
2090 2091 @command('debugwireargs',
2091 2092 [('', 'three', '', 'three'),
2092 2093 ('', 'four', '', 'four'),
2093 2094 ('', 'five', '', 'five'),
2094 2095 ] + commands.remoteopts,
2095 2096 _('REPO [OPTIONS]... [ONE [TWO]]'),
2096 2097 norepo=True)
2097 2098 def debugwireargs(ui, repopath, *vals, **opts):
2098 2099 repo = hg.peer(ui, opts, repopath)
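# Drop the shared remote options (--ssh, --remotecmd, --insecure) from opts
# so that only the wire-argument values themselves are forwarded to the peer.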
2099 2100 for opt in commands.remoteopts:
2100 2101 del opts[opt[1]]
2101 2102 args = {}
2102 2103 for k, v in opts.iteritems():
2103 2104 if v:
2104 2105 args[k] = v
2105 2106 # run twice to check that we don't mess up the stream for the next command
2106 2107 res1 = repo.debugwireargs(*vals, **args)
2107 2108 res2 = repo.debugwireargs(*vals, **args)
2108 2109 ui.write("%s\n" % res1)
2109 2110 if res1 != res2:
2110 2111 ui.warn("%s\n" % res2)