debugdeltachain: output information about sparse read if enabled
Paul Morelle
r35050:5cbbef8d default
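This change makes 'hg debugdeltachain' print three extra columns (readsize, largestblk, rddensity) when sparse reads are enabled. A minimal sketch of exercising the new output, using the config knob named in the patched docstring (any revlog works; the manifest is used here only for illustration):

    hg --config experimental.sparse-read=True debugdeltachain -m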
@@ -1,2328 +1,2364 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
80 80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 81 def debugancestor(ui, repo, *args):
82 82 """find the ancestor revision of two revisions in a given index"""
83 83 if len(args) == 3:
84 84 index, rev1, rev2 = args
85 85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 86 lookup = r.lookup
87 87 elif len(args) == 2:
88 88 if not repo:
89 89 raise error.Abort(_('there is no Mercurial repository here '
90 90 '(.hg not found)'))
91 91 rev1, rev2 = args
92 92 r = repo.changelog
93 93 lookup = repo.lookup
94 94 else:
95 95 raise error.Abort(_('either two or three arguments required'))
96 96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98 98
99 99 @command('debugapplystreamclonebundle', [], 'FILE')
100 100 def debugapplystreamclonebundle(ui, repo, fname):
101 101 """apply a stream clone bundle file"""
102 102 f = hg.openpath(ui, fname)
103 103 gen = exchange.readbundle(ui, f, fname)
104 104 gen.apply(repo)
105 105
106 106 @command('debugbuilddag',
107 107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 109 ('n', 'new-file', None, _('add new file at each rev'))],
110 110 _('[OPTION]... [TEXT]'))
111 111 def debugbuilddag(ui, repo, text=None,
112 112 mergeable_file=False,
113 113 overwritten_file=False,
114 114 new_file=False):
115 115 """builds a repo with a given DAG from scratch in the current empty repo
116 116
117 117 The description of the DAG is read from stdin if not given on the
118 118 command line.
119 119
120 120 Elements:
121 121
122 122 - "+n" is a linear run of n nodes based on the current default parent
123 123 - "." is a single node based on the current default parent
124 124 - "$" resets the default parent to null (implied at the start);
125 125 otherwise the default parent is always the last node created
126 126 - "<p" sets the default parent to the backref p
127 127 - "*p" is a fork at parent p, which is a backref
128 128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 129 - "/p2" is a merge of the preceding node and p2
130 130 - ":tag" defines a local tag for the preceding node
131 131 - "@branch" sets the named branch for subsequent nodes
132 132 - "#...\\n" is a comment up to the end of the line
133 133
134 134 Whitespace between the above elements is ignored.
135 135
136 136 A backref is either
137 137
138 138 - a number n, which references the node curr-n, where curr is the current
139 139 node, or
140 140 - the name of a local tag you placed earlier using ":tag", or
141 141 - empty to denote the default parent.
142 142
143 143 All string-valued elements are either strictly alphanumeric, or must
144 144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 145 """
146 146
147 147 if text is None:
148 148 ui.status(_("reading DAG from stdin\n"))
149 149 text = ui.fin.read()
150 150
151 151 cl = repo.changelog
152 152 if len(cl) > 0:
153 153 raise error.Abort(_('repository is not empty'))
154 154
155 155 # determine number of revs in DAG
156 156 total = 0
157 157 for type, data in dagparser.parsedag(text):
158 158 if type == 'n':
159 159 total += 1
160 160
161 161 if mergeable_file:
162 162 linesperrev = 2
163 163 # make a file with k lines per rev
164 164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 165 initialmergedlines.append("")
166 166
167 167 tags = []
168 168
169 169 wlock = lock = tr = None
170 170 try:
171 171 wlock = repo.wlock()
172 172 lock = repo.lock()
173 173 tr = repo.transaction("builddag")
174 174
175 175 at = -1
176 176 atbranch = 'default'
177 177 nodeids = []
178 178 id = 0
179 179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 180 for type, data in dagparser.parsedag(text):
181 181 if type == 'n':
182 182 ui.note(('node %s\n' % str(data)))
183 183 id, ps = data
184 184
185 185 files = []
186 186 fctxs = {}
187 187
188 188 p2 = None
189 189 if mergeable_file:
190 190 fn = "mf"
191 191 p1 = repo[ps[0]]
192 192 if len(ps) > 1:
193 193 p2 = repo[ps[1]]
194 194 pa = p1.ancestor(p2)
195 195 base, local, other = [x[fn].data() for x in (pa, p1,
196 196 p2)]
197 197 m3 = simplemerge.Merge3Text(base, local, other)
198 198 ml = [l.strip() for l in m3.merge_lines()]
199 199 ml.append("")
200 200 elif at > 0:
201 201 ml = p1[fn].data().split("\n")
202 202 else:
203 203 ml = initialmergedlines
204 204 ml[id * linesperrev] += " r%i" % id
205 205 mergedtext = "\n".join(ml)
206 206 files.append(fn)
207 207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208 208
209 209 if overwritten_file:
210 210 fn = "of"
211 211 files.append(fn)
212 212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 213
214 214 if new_file:
215 215 fn = "nf%i" % id
216 216 files.append(fn)
217 217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 218 if len(ps) > 1:
219 219 if not p2:
220 220 p2 = repo[ps[1]]
221 221 for fn in p2:
222 222 if fn.startswith("nf"):
223 223 files.append(fn)
224 224 fctxs[fn] = p2[fn]
225 225
226 226 def fctxfn(repo, cx, path):
227 227 return fctxs.get(path)
228 228
229 229 if len(ps) == 0 or ps[0] < 0:
230 230 pars = [None, None]
231 231 elif len(ps) == 1:
232 232 pars = [nodeids[ps[0]], None]
233 233 else:
234 234 pars = [nodeids[p] for p in ps]
235 235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 236 date=(id, 0),
237 237 user="debugbuilddag",
238 238 extra={'branch': atbranch})
239 239 nodeid = repo.commitctx(cx)
240 240 nodeids.append(nodeid)
241 241 at = id
242 242 elif type == 'l':
243 243 id, name = data
244 244 ui.note(('tag %s\n' % name))
245 245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 246 elif type == 'a':
247 247 ui.note(('branch %s\n' % data))
248 248 atbranch = data
249 249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 250 tr.close()
251 251
252 252 if tags:
253 253 repo.vfs.write("localtags", "".join(tags))
254 254 finally:
255 255 ui.progress(_('building'), None)
256 256 release(tr, lock, wlock)
257 257
258 258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 259 indent_string = ' ' * indent
260 260 if all:
261 261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 262 % indent_string)
263 263
264 264 def showchunks(named):
265 265 ui.write("\n%s%s\n" % (indent_string, named))
266 266 for deltadata in gen.deltaiter():
267 267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 268 ui.write("%s%s %s %s %s %s %s\n" %
269 269 (indent_string, hex(node), hex(p1), hex(p2),
270 270 hex(cs), hex(deltabase), len(delta)))
271 271
272 272 chunkdata = gen.changelogheader()
273 273 showchunks("changelog")
274 274 chunkdata = gen.manifestheader()
275 275 showchunks("manifest")
276 276 for chunkdata in iter(gen.filelogheader, {}):
277 277 fname = chunkdata['filename']
278 278 showchunks(fname)
279 279 else:
280 280 if isinstance(gen, bundle2.unbundle20):
281 281 raise error.Abort(_('use debugbundle2 for this file'))
282 282 chunkdata = gen.changelogheader()
283 283 for deltadata in gen.deltaiter():
284 284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 285 ui.write("%s%s\n" % (indent_string, hex(node)))
286 286
287 287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 288 """display version and markers contained in 'data'"""
289 289 opts = pycompat.byteskwargs(opts)
290 290 data = part.read()
291 291 indent_string = ' ' * indent
292 292 try:
293 293 version, markers = obsolete._readmarkers(data)
294 294 except error.UnknownVersion as exc:
295 295 msg = "%sunsupported version: %s (%d bytes)\n"
296 296 msg %= indent_string, exc.version, len(data)
297 297 ui.write(msg)
298 298 else:
299 299 msg = "%sversion: %s (%d bytes)\n"
300 300 msg %= indent_string, version, len(data)
301 301 ui.write(msg)
302 302 fm = ui.formatter('debugobsolete', opts)
303 303 for rawmarker in sorted(markers):
304 304 m = obsutil.marker(None, rawmarker)
305 305 fm.startitem()
306 306 fm.plain(indent_string)
307 307 cmdutil.showmarker(fm, m)
308 308 fm.end()
309 309
310 310 def _debugphaseheads(ui, data, indent=0):
311 311 """display version and markers contained in 'data'"""
312 312 indent_string = ' ' * indent
313 313 headsbyphase = phases.binarydecode(data)
314 314 for phase in phases.allphases:
315 315 for head in headsbyphase[phase]:
316 316 ui.write(indent_string)
317 317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318 318
319 319 def _quasirepr(thing):
320 320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
321 321 return '{%s}' % (
322 322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
323 323 return pycompat.bytestr(repr(thing))
324 324
325 325 def _debugbundle2(ui, gen, all=None, **opts):
326 326 """lists the contents of a bundle2"""
327 327 if not isinstance(gen, bundle2.unbundle20):
328 328 raise error.Abort(_('not a bundle2 file'))
329 329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
330 330 parttypes = opts.get(r'part_type', [])
331 331 for part in gen.iterparts():
332 332 if parttypes and part.type not in parttypes:
333 333 continue
334 334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
335 335 if part.type == 'changegroup':
336 336 version = part.params.get('version', '01')
337 337 cg = changegroup.getunbundler(version, part, 'UN')
338 338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 339 if part.type == 'obsmarkers':
340 340 _debugobsmarkers(ui, part, indent=4, **opts)
341 341 if part.type == 'phase-heads':
342 342 _debugphaseheads(ui, part, indent=4)
343 343
344 344 @command('debugbundle',
345 345 [('a', 'all', None, _('show all details')),
346 346 ('', 'part-type', [], _('show only the named part type')),
347 347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 348 _('FILE'),
349 349 norepo=True)
350 350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 351 """lists the contents of a bundle"""
352 352 with hg.openpath(ui, bundlepath) as f:
353 353 if spec:
354 354 spec = exchange.getbundlespec(ui, f)
355 355 ui.write('%s\n' % spec)
356 356 return
357 357
358 358 gen = exchange.readbundle(ui, f, bundlepath)
359 359 if isinstance(gen, bundle2.unbundle20):
360 360 return _debugbundle2(ui, gen, all=all, **opts)
361 361 _debugchangegroup(ui, gen, all=all, **opts)
362 362
363 363 @command('debugcapabilities',
364 364 [], _('PATH'),
365 365 norepo=True)
366 366 def debugcapabilities(ui, path, **opts):
367 367 """lists the capabilities of a remote peer"""
368 368 peer = hg.peer(ui, opts, path)
369 369 caps = peer.capabilities()
370 370 ui.write(('Main capabilities:\n'))
371 371 for c in sorted(caps):
372 372 ui.write((' %s\n') % c)
373 373 b2caps = bundle2.bundle2caps(peer)
374 374 if b2caps:
375 375 ui.write(('Bundle2 capabilities:\n'))
376 376 for key, values in sorted(b2caps.iteritems()):
377 377 ui.write((' %s\n') % key)
378 378 for v in values:
379 379 ui.write((' %s\n') % v)
380 380
381 381 @command('debugcheckstate', [], '')
382 382 def debugcheckstate(ui, repo):
383 383 """validate the correctness of the current dirstate"""
384 384 parent1, parent2 = repo.dirstate.parents()
385 385 m1 = repo[parent1].manifest()
386 386 m2 = repo[parent2].manifest()
387 387 errors = 0
388 388 for f in repo.dirstate:
389 389 state = repo.dirstate[f]
390 390 if state in "nr" and f not in m1:
391 391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
392 392 errors += 1
393 393 if state in "a" and f in m1:
394 394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
395 395 errors += 1
396 396 if state in "m" and f not in m1 and f not in m2:
397 397 ui.warn(_("%s in state %s, but not in either manifest\n") %
398 398 (f, state))
399 399 errors += 1
400 400 for f in m1:
401 401 state = repo.dirstate[f]
402 402 if state not in "nrm":
403 403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
404 404 errors += 1
405 405 if errors:
406 406 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
407 407 raise error.Abort(errstr)
408 408
409 409 @command('debugcolor',
410 410 [('', 'style', None, _('show all configured styles'))],
411 411 'hg debugcolor')
412 412 def debugcolor(ui, repo, **opts):
413 413 """show available color, effects or style"""
414 414 ui.write(('color mode: %s\n') % ui._colormode)
415 415 if opts.get(r'style'):
416 416 return _debugdisplaystyle(ui)
417 417 else:
418 418 return _debugdisplaycolor(ui)
419 419
420 420 def _debugdisplaycolor(ui):
421 421 ui = ui.copy()
422 422 ui._styles.clear()
423 423 for effect in color._activeeffects(ui).keys():
424 424 ui._styles[effect] = effect
425 425 if ui._terminfoparams:
426 426 for k, v in ui.configitems('color'):
427 427 if k.startswith('color.'):
428 428 ui._styles[k] = k[6:]
429 429 elif k.startswith('terminfo.'):
430 430 ui._styles[k] = k[9:]
431 431 ui.write(_('available colors:\n'))
432 432 # sort labels with a '_' after the others to group the '_background' entries.
433 433 items = sorted(ui._styles.items(),
434 434 key=lambda i: ('_' in i[0], i[0], i[1]))
435 435 for colorname, label in items:
436 436 ui.write(('%s\n') % colorname, label=label)
437 437
438 438 def _debugdisplaystyle(ui):
439 439 ui.write(_('available style:\n'))
440 440 width = max(len(s) for s in ui._styles)
441 441 for label, effects in sorted(ui._styles.items()):
442 442 ui.write('%s' % label, label=label)
443 443 if effects:
444 444 # 50
445 445 ui.write(': ')
446 446 ui.write(' ' * (max(0, width - len(label))))
447 447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
448 448 ui.write('\n')
449 449
450 450 @command('debugcreatestreamclonebundle', [], 'FILE')
451 451 def debugcreatestreamclonebundle(ui, repo, fname):
452 452 """create a stream clone bundle file
453 453
454 454 Stream bundles are special bundles that are essentially archives of
455 455 revlog files. They are commonly used for cloning very quickly.
456 456 """
457 457 # TODO we may want to turn this into an abort when this functionality
458 458 # is moved into `hg bundle`.
459 459 if phases.hassecret(repo):
460 460 ui.warn(_('(warning: stream clone bundle will contain secret '
461 461 'revisions)\n'))
462 462
463 463 requirements, gen = streamclone.generatebundlev1(repo)
464 464 changegroup.writechunks(ui, gen, fname)
465 465
466 466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
467 467
468 468 @command('debugdag',
469 469 [('t', 'tags', None, _('use tags as labels')),
470 470 ('b', 'branches', None, _('annotate with branch names')),
471 471 ('', 'dots', None, _('use dots for runs')),
472 472 ('s', 'spaces', None, _('separate elements by spaces'))],
473 473 _('[OPTION]... [FILE [REV]...]'),
474 474 optionalrepo=True)
475 475 def debugdag(ui, repo, file_=None, *revs, **opts):
476 476 """format the changelog or an index DAG as a concise textual description
477 477
478 478 If you pass a revlog index, the revlog's DAG is emitted. If you list
479 479 revision numbers, they get labeled in the output as rN.
480 480
481 481 Otherwise, the changelog DAG of the current repo is emitted.
482 482 """
483 483 spaces = opts.get(r'spaces')
484 484 dots = opts.get(r'dots')
485 485 if file_:
486 486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
487 487 file_)
488 488 revs = set((int(r) for r in revs))
489 489 def events():
490 490 for r in rlog:
491 491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
492 492 if p != -1))
493 493 if r in revs:
494 494 yield 'l', (r, "r%i" % r)
495 495 elif repo:
496 496 cl = repo.changelog
497 497 tags = opts.get(r'tags')
498 498 branches = opts.get(r'branches')
499 499 if tags:
500 500 labels = {}
501 501 for l, n in repo.tags().items():
502 502 labels.setdefault(cl.rev(n), []).append(l)
503 503 def events():
504 504 b = "default"
505 505 for r in cl:
506 506 if branches:
507 507 newb = cl.read(cl.node(r))[5]['branch']
508 508 if newb != b:
509 509 yield 'a', newb
510 510 b = newb
511 511 yield 'n', (r, list(p for p in cl.parentrevs(r)
512 512 if p != -1))
513 513 if tags:
514 514 ls = labels.get(r)
515 515 if ls:
516 516 for l in ls:
517 517 yield 'l', (r, l)
518 518 else:
519 519 raise error.Abort(_('need repo for changelog dag'))
520 520
521 521 for line in dagparser.dagtextlines(events(),
522 522 addspaces=spaces,
523 523 wraplabels=True,
524 524 wrapannotations=True,
525 525 wrapnonlinear=dots,
526 526 usedots=dots,
527 527 maxlinewidth=70):
528 528 ui.write(line)
529 529 ui.write("\n")
530 530
531 531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
532 532 def debugdata(ui, repo, file_, rev=None, **opts):
533 533 """dump the contents of a data file revision"""
534 534 opts = pycompat.byteskwargs(opts)
535 535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
536 536 if rev is not None:
537 537 raise error.CommandError('debugdata', _('invalid arguments'))
538 538 file_, rev = None, file_
539 539 elif rev is None:
540 540 raise error.CommandError('debugdata', _('invalid arguments'))
541 541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
542 542 try:
543 543 ui.write(r.revision(r.lookup(rev), raw=True))
544 544 except KeyError:
545 545 raise error.Abort(_('invalid revision identifier %s') % rev)
546 546
547 547 @command('debugdate',
548 548 [('e', 'extended', None, _('try extended date formats'))],
549 549 _('[-e] DATE [RANGE]'),
550 550 norepo=True, optionalrepo=True)
551 551 def debugdate(ui, date, range=None, **opts):
552 552 """parse and display a date"""
553 553 if opts[r"extended"]:
554 554 d = util.parsedate(date, util.extendeddateformats)
555 555 else:
556 556 d = util.parsedate(date)
557 557 ui.write(("internal: %s %s\n") % d)
558 558 ui.write(("standard: %s\n") % util.datestr(d))
559 559 if range:
560 560 m = util.matchdate(range)
561 561 ui.write(("match: %s\n") % m(d[0]))
562 562
563 563 @command('debugdeltachain',
564 564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
565 565 _('-c|-m|FILE'),
566 566 optionalrepo=True)
567 567 def debugdeltachain(ui, repo, file_=None, **opts):
568 568 """dump information about delta chains in a revlog
569 569
570 570 Output can be templatized. Available template keywords are:
571 571
572 572 :``rev``: revision number
573 573 :``chainid``: delta chain identifier (numbered by unique base)
574 574 :``chainlen``: delta chain length to this revision
575 575 :``prevrev``: previous revision in delta chain
576 576 :``deltatype``: role of delta / how it was computed
577 577 :``compsize``: compressed size of revision
578 578 :``uncompsize``: uncompressed size of revision
579 579 :``chainsize``: total size of compressed revisions in chain
580 580 :``chainratio``: total chain size divided by uncompressed revision size
581 581 (new delta chains typically start at ratio 2.00)
582 582 :``lindist``: linear distance from base revision in delta chain to end
583 583 of this revision
584 584 :``extradist``: total size of revisions not part of this delta chain from
585 585 base of delta chain to end of this revision; a measurement
586 586 of how much extra data we need to read/seek across to read
587 587 the delta chain for this revision
588 588 :``extraratio``: extradist divided by chainsize; another representation of
589 589 how much unrelated data is needed to load this delta chain
590
591 If the repository is configured to use sparse reads, additional keywords
592 are available:
593
594 :``readsize``: total size of data read from the disk for a revision
595 (sum of the sizes of all the blocks)
596 :``largestblock``: size of the largest block of data read from the disk
597 :``readdensity``: density of useful bytes in the data read from the disk
598
599 Sparse reads can be enabled with experimental.sparse-read = True
590 600 """
591 601 opts = pycompat.byteskwargs(opts)
592 602 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
593 603 index = r.index
594 604 generaldelta = r.version & revlog.FLAG_GENERALDELTA
605 withsparseread = getattr(r, '_withsparseread', False)
595 606
596 607 def revinfo(rev):
597 608 e = index[rev]
598 609 compsize = e[1]
599 610 uncompsize = e[2]
600 611 chainsize = 0
601 612
602 613 if generaldelta:
603 614 if e[3] == e[5]:
604 615 deltatype = 'p1'
605 616 elif e[3] == e[6]:
606 617 deltatype = 'p2'
607 618 elif e[3] == rev - 1:
608 619 deltatype = 'prev'
609 620 elif e[3] == rev:
610 621 deltatype = 'base'
611 622 else:
612 623 deltatype = 'other'
613 624 else:
614 625 if e[3] == rev:
615 626 deltatype = 'base'
616 627 else:
617 628 deltatype = 'prev'
618 629
619 630 chain = r._deltachain(rev)[0]
620 631 for iterrev in chain:
621 632 e = index[iterrev]
622 633 chainsize += e[1]
623 634
624 635 return compsize, uncompsize, deltatype, chain, chainsize
625 636
626 637 fm = ui.formatter('debugdeltachain', opts)
627 638
628 639 fm.plain(' rev chain# chainlen prev delta '
629 640 'size rawsize chainsize ratio lindist extradist '
630 'extraratio\n')
641 'extraratio')
642 if withsparseread:
643 fm.plain(' readsize largestblk rddensity')
644 fm.plain('\n')
631 645
632 646 chainbases = {}
633 647 for rev in r:
634 648 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
635 649 chainbase = chain[0]
636 650 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
637 basestart = r.start(chainbase)
638 revstart = r.start(rev)
651 start = r.start
652 length = r.length
653 basestart = start(chainbase)
654 revstart = start(rev)
639 655 lineardist = revstart + comp - basestart
640 656 extradist = lineardist - chainsize
641 657 try:
642 658 prevrev = chain[-2]
643 659 except IndexError:
644 660 prevrev = -1
645 661
646 662 chainratio = float(chainsize) / float(uncomp)
647 663 extraratio = float(extradist) / float(chainsize)
648 664
649 665 fm.startitem()
650 666 fm.write('rev chainid chainlen prevrev deltatype compsize '
651 667 'uncompsize chainsize chainratio lindist extradist '
652 668 'extraratio',
653 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
669 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
654 670 rev, chainid, len(chain), prevrev, deltatype, comp,
655 671 uncomp, chainsize, chainratio, lineardist, extradist,
656 672 extraratio,
657 673 rev=rev, chainid=chainid, chainlen=len(chain),
658 674 prevrev=prevrev, deltatype=deltatype, compsize=comp,
659 675 uncompsize=uncomp, chainsize=chainsize,
660 676 chainratio=chainratio, lindist=lineardist,
661 677 extradist=extradist, extraratio=extraratio)
678 if withsparseread:
679 readsize = 0
680 largestblock = 0
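# slice the chain into the on-disk blocks a sparse read would issue,
# accumulating the total bytes read and the largest single block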
681 for revschunk in revlog._slicechunk(r, chain):
682 blkend = start(revschunk[-1]) + length(revschunk[-1])
683 blksize = blkend - start(revschunk[0])
684
685 readsize += blksize
686 if largestblock < blksize:
687 largestblock = blksize
688
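# density of useful bytes: chain data actually needed divided by bytes read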
689 readdensity = float(chainsize) / float(readsize)
690
691 fm.write('readsize largestblock readdensity',
692 ' %10d %10d %9.5f',
693 readsize, largestblock, readdensity,
694 readsize=readsize, largestblock=largestblock,
695 readdensity=readdensity)
696
697 fm.plain('\n')
662 698
663 699 fm.end()
664 700
665 701 @command('debugdirstate|debugstate',
666 702 [('', 'nodates', None, _('do not display the saved mtime')),
667 703 ('', 'datesort', None, _('sort by saved mtime'))],
668 704 _('[OPTION]...'))
669 705 def debugstate(ui, repo, **opts):
670 706 """show the contents of the current dirstate"""
671 707
672 708 nodates = opts.get(r'nodates')
673 709 datesort = opts.get(r'datesort')
674 710
675 711 timestr = ""
676 712 if datesort:
677 713 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
678 714 else:
679 715 keyfunc = None # sort by filename
680 716 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
681 717 if ent[3] == -1:
682 718 timestr = 'unset '
683 719 elif nodates:
684 720 timestr = 'set '
685 721 else:
686 722 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
687 723 time.localtime(ent[3]))
688 724 if ent[1] & 0o20000:
689 725 mode = 'lnk'
690 726 else:
691 727 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
692 728 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
693 729 for f in repo.dirstate.copies():
694 730 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
695 731
696 732 @command('debugdiscovery',
697 733 [('', 'old', None, _('use old-style discovery')),
698 734 ('', 'nonheads', None,
699 735 _('use old-style discovery with non-heads included')),
700 736 ] + cmdutil.remoteopts,
701 737 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
702 738 def debugdiscovery(ui, repo, remoteurl="default", **opts):
703 739 """runs the changeset discovery protocol in isolation"""
704 740 opts = pycompat.byteskwargs(opts)
705 741 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
706 742 opts.get('branch'))
707 743 remote = hg.peer(repo, opts, remoteurl)
708 744 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
709 745
710 746 # make sure tests are repeatable
711 747 random.seed(12323)
712 748
713 749 def doit(localheads, remoteheads, remote=remote):
714 750 if opts.get('old'):
715 751 if localheads:
716 752 raise error.Abort('cannot use localheads with old style '
717 753 'discovery')
718 754 if not util.safehasattr(remote, 'branches'):
719 755 # enable in-client legacy support
720 756 remote = localrepo.locallegacypeer(remote.local())
721 757 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
722 758 force=True)
723 759 common = set(common)
724 760 if not opts.get('nonheads'):
725 761 ui.write(("unpruned common: %s\n") %
726 762 " ".join(sorted(short(n) for n in common)))
727 763 dag = dagutil.revlogdag(repo.changelog)
728 764 all = dag.ancestorset(dag.internalizeall(common))
729 765 common = dag.externalizeall(dag.headsetofconnecteds(all))
730 766 else:
731 767 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
732 768 common = set(common)
733 769 rheads = set(hds)
734 770 lheads = set(repo.heads())
735 771 ui.write(("common heads: %s\n") %
736 772 " ".join(sorted(short(n) for n in common)))
737 773 if lheads <= common:
738 774 ui.write(("local is subset\n"))
739 775 elif rheads <= common:
740 776 ui.write(("remote is subset\n"))
741 777
742 778 serverlogs = opts.get('serverlog')
743 779 if serverlogs:
744 780 for filename in serverlogs:
745 781 with open(filename, 'r') as logfile:
746 782 line = logfile.readline()
747 783 while line:
748 784 parts = line.strip().split(';')
749 785 op = parts[1]
750 786 if op == 'cg':
751 787 pass
752 788 elif op == 'cgss':
753 789 doit(parts[2].split(' '), parts[3].split(' '))
754 790 elif op == 'unb':
755 791 doit(parts[3].split(' '), parts[2].split(' '))
756 792 line = logfile.readline()
757 793 else:
758 794 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
759 795 opts.get('remote_head'))
760 796 localrevs = opts.get('local_head')
761 797 doit(localrevs, remoterevs)
762 798
763 799 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
764 800 def debugextensions(ui, **opts):
765 801 '''show information about active extensions'''
766 802 opts = pycompat.byteskwargs(opts)
767 803 exts = extensions.extensions(ui)
768 804 hgver = util.version()
769 805 fm = ui.formatter('debugextensions', opts)
770 806 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
771 807 isinternal = extensions.ismoduleinternal(extmod)
772 808 extsource = pycompat.fsencode(extmod.__file__)
773 809 if isinternal:
774 810 exttestedwith = [] # never expose magic string to users
775 811 else:
776 812 exttestedwith = getattr(extmod, 'testedwith', '').split()
777 813 extbuglink = getattr(extmod, 'buglink', None)
778 814
779 815 fm.startitem()
780 816
781 817 if ui.quiet or ui.verbose:
782 818 fm.write('name', '%s\n', extname)
783 819 else:
784 820 fm.write('name', '%s', extname)
785 821 if isinternal or hgver in exttestedwith:
786 822 fm.plain('\n')
787 823 elif not exttestedwith:
788 824 fm.plain(_(' (untested!)\n'))
789 825 else:
790 826 lasttestedversion = exttestedwith[-1]
791 827 fm.plain(' (%s!)\n' % lasttestedversion)
792 828
793 829 fm.condwrite(ui.verbose and extsource, 'source',
794 830 _(' location: %s\n'), extsource or "")
795 831
796 832 if ui.verbose:
797 833 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
798 834 fm.data(bundled=isinternal)
799 835
800 836 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
801 837 _(' tested with: %s\n'),
802 838 fm.formatlist(exttestedwith, name='ver'))
803 839
804 840 fm.condwrite(ui.verbose and extbuglink, 'buglink',
805 841 _(' bug reporting: %s\n'), extbuglink or "")
806 842
807 843 fm.end()
808 844
809 845 @command('debugfileset',
810 846 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
811 847 _('[-r REV] FILESPEC'))
812 848 def debugfileset(ui, repo, expr, **opts):
813 849 '''parse and apply a fileset specification'''
814 850 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
815 851 if ui.verbose:
816 852 tree = fileset.parse(expr)
817 853 ui.note(fileset.prettyformat(tree), "\n")
818 854
819 855 for f in ctx.getfileset(expr):
820 856 ui.write("%s\n" % f)
821 857
822 858 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
823 859 def debugfsinfo(ui, path="."):
824 860 """show information detected about current filesystem"""
825 861 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
826 862 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
827 863 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
828 864 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
829 865 casesensitive = '(unknown)'
830 866 try:
831 867 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
832 868 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
833 869 except OSError:
834 870 pass
835 871 ui.write(('case-sensitive: %s\n') % casesensitive)
836 872
837 873 @command('debuggetbundle',
838 874 [('H', 'head', [], _('id of head node'), _('ID')),
839 875 ('C', 'common', [], _('id of common node'), _('ID')),
840 876 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
841 877 _('REPO FILE [-H|-C ID]...'),
842 878 norepo=True)
843 879 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
844 880 """retrieves a bundle from a repo
845 881
846 882 Every ID must be a full-length hex node id string. Saves the bundle to the
847 883 given file.
848 884 """
849 885 opts = pycompat.byteskwargs(opts)
850 886 repo = hg.peer(ui, opts, repopath)
851 887 if not repo.capable('getbundle'):
852 888 raise error.Abort("getbundle() not supported by target repository")
853 889 args = {}
854 890 if common:
855 891 args[r'common'] = [bin(s) for s in common]
856 892 if head:
857 893 args[r'heads'] = [bin(s) for s in head]
858 894 # TODO: get desired bundlecaps from command line.
859 895 args[r'bundlecaps'] = None
860 896 bundle = repo.getbundle('debug', **args)
861 897
862 898 bundletype = opts.get('type', 'bzip2').lower()
863 899 btypes = {'none': 'HG10UN',
864 900 'bzip2': 'HG10BZ',
865 901 'gzip': 'HG10GZ',
866 902 'bundle2': 'HG20'}
867 903 bundletype = btypes.get(bundletype)
868 904 if bundletype not in bundle2.bundletypes:
869 905 raise error.Abort(_('unknown bundle type specified with --type'))
870 906 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
871 907
872 908 @command('debugignore', [], '[FILE]')
873 909 def debugignore(ui, repo, *files, **opts):
874 910 """display the combined ignore pattern and information about ignored files
875 911
876 912 With no argument display the combined ignore pattern.
877 913
878 914 Given space separated file names, shows if the given file is ignored and
879 915 if so, show the ignore rule (file and line number) that matched it.
880 916 """
881 917 ignore = repo.dirstate._ignore
882 918 if not files:
883 919 # Show all the patterns
884 920 ui.write("%s\n" % repr(ignore))
885 921 else:
886 922 m = scmutil.match(repo[None], pats=files)
887 923 for f in m.files():
888 924 nf = util.normpath(f)
889 925 ignored = None
890 926 ignoredata = None
891 927 if nf != '.':
892 928 if ignore(nf):
893 929 ignored = nf
894 930 ignoredata = repo.dirstate._ignorefileandline(nf)
895 931 else:
896 932 for p in util.finddirs(nf):
897 933 if ignore(p):
898 934 ignored = p
899 935 ignoredata = repo.dirstate._ignorefileandline(p)
900 936 break
901 937 if ignored:
902 938 if ignored == nf:
903 939 ui.write(_("%s is ignored\n") % m.uipath(f))
904 940 else:
905 941 ui.write(_("%s is ignored because of "
906 942 "containing folder %s\n")
907 943 % (m.uipath(f), ignored))
908 944 ignorefile, lineno, line = ignoredata
909 945 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
910 946 % (ignorefile, lineno, line))
911 947 else:
912 948 ui.write(_("%s is not ignored\n") % m.uipath(f))
913 949
914 950 @command('debugindex', cmdutil.debugrevlogopts +
915 951 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
916 952 _('[-f FORMAT] -c|-m|FILE'),
917 953 optionalrepo=True)
918 954 def debugindex(ui, repo, file_=None, **opts):
919 955 """dump the contents of an index file"""
920 956 opts = pycompat.byteskwargs(opts)
921 957 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
922 958 format = opts.get('format', 0)
923 959 if format not in (0, 1):
924 960 raise error.Abort(_("unknown format %d") % format)
925 961
926 962 generaldelta = r.version & revlog.FLAG_GENERALDELTA
927 963 if generaldelta:
928 964 basehdr = ' delta'
929 965 else:
930 966 basehdr = ' base'
931 967
932 968 if ui.debugflag:
933 969 shortfn = hex
934 970 else:
935 971 shortfn = short
936 972
937 973 # There might not be anything in r, so have a sane default
938 974 idlen = 12
939 975 for i in r:
940 976 idlen = len(shortfn(r.node(i)))
941 977 break
942 978
943 979 if format == 0:
944 980 ui.write((" rev offset length " + basehdr + " linkrev"
945 981 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
946 982 elif format == 1:
947 983 ui.write((" rev flag offset length"
948 984 " size " + basehdr + " link p1 p2"
949 985 " %s\n") % "nodeid".rjust(idlen))
950 986
951 987 for i in r:
952 988 node = r.node(i)
953 989 if generaldelta:
954 990 base = r.deltaparent(i)
955 991 else:
956 992 base = r.chainbase(i)
957 993 if format == 0:
958 994 try:
959 995 pp = r.parents(node)
960 996 except Exception:
961 997 pp = [nullid, nullid]
962 998 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
963 999 i, r.start(i), r.length(i), base, r.linkrev(i),
964 1000 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
965 1001 elif format == 1:
966 1002 pr = r.parentrevs(i)
967 1003 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
968 1004 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
969 1005 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
970 1006
971 1007 @command('debugindexdot', cmdutil.debugrevlogopts,
972 1008 _('-c|-m|FILE'), optionalrepo=True)
973 1009 def debugindexdot(ui, repo, file_=None, **opts):
974 1010 """dump an index DAG as a graphviz dot file"""
975 1011 opts = pycompat.byteskwargs(opts)
976 1012 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
977 1013 ui.write(("digraph G {\n"))
978 1014 for i in r:
979 1015 node = r.node(i)
980 1016 pp = r.parents(node)
981 1017 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
982 1018 if pp[1] != nullid:
983 1019 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
984 1020 ui.write("}\n")
985 1021
986 1022 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
987 1023 def debuginstall(ui, **opts):
988 1024 '''test Mercurial installation
989 1025
990 1026 Returns 0 on success.
991 1027 '''
992 1028 opts = pycompat.byteskwargs(opts)
993 1029
994 1030 def writetemp(contents):
995 1031 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
996 1032 f = os.fdopen(fd, pycompat.sysstr("wb"))
997 1033 f.write(contents)
998 1034 f.close()
999 1035 return name
1000 1036
1001 1037 problems = 0
1002 1038
1003 1039 fm = ui.formatter('debuginstall', opts)
1004 1040 fm.startitem()
1005 1041
1006 1042 # encoding
1007 1043 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1008 1044 err = None
1009 1045 try:
1010 1046 codecs.lookup(pycompat.sysstr(encoding.encoding))
1011 1047 except LookupError as inst:
1012 1048 err = util.forcebytestr(inst)
1013 1049 problems += 1
1014 1050 fm.condwrite(err, 'encodingerror', _(" %s\n"
1015 1051 " (check that your locale is properly set)\n"), err)
1016 1052
1017 1053 # Python
1018 1054 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1019 1055 pycompat.sysexecutable)
1020 1056 fm.write('pythonver', _("checking Python version (%s)\n"),
1021 1057 ("%d.%d.%d" % sys.version_info[:3]))
1022 1058 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1023 1059 os.path.dirname(pycompat.fsencode(os.__file__)))
1024 1060
1025 1061 security = set(sslutil.supportedprotocols)
1026 1062 if sslutil.hassni:
1027 1063 security.add('sni')
1028 1064
1029 1065 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1030 1066 fm.formatlist(sorted(security), name='protocol',
1031 1067 fmt='%s', sep=','))
1032 1068
1033 1069 # These are warnings, not errors. So don't increment problem count. This
1034 1070 # may change in the future.
1035 1071 if 'tls1.2' not in security:
1036 1072 fm.plain(_(' TLS 1.2 not supported by Python install; '
1037 1073 'network connections lack modern security\n'))
1038 1074 if 'sni' not in security:
1039 1075 fm.plain(_(' SNI not supported by Python install; may have '
1040 1076 'connectivity issues with some servers\n'))
1041 1077
1042 1078 # TODO print CA cert info
1043 1079
1044 1080 # hg version
1045 1081 hgver = util.version()
1046 1082 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1047 1083 hgver.split('+')[0])
1048 1084 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1049 1085 '+'.join(hgver.split('+')[1:]))
1050 1086
1051 1087 # compiled modules
1052 1088 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1053 1089 policy.policy)
1054 1090 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1055 1091 os.path.dirname(pycompat.fsencode(__file__)))
1056 1092
1057 1093 if policy.policy in ('c', 'allow'):
1058 1094 err = None
1059 1095 try:
1060 1096 from .cext import (
1061 1097 base85,
1062 1098 bdiff,
1063 1099 mpatch,
1064 1100 osutil,
1065 1101 )
1066 1102 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1067 1103 except Exception as inst:
1068 1104 err = util.forcebytestr(inst)
1069 1105 problems += 1
1070 1106 fm.condwrite(err, 'extensionserror', " %s\n", err)
1071 1107
1072 1108 compengines = util.compengines._engines.values()
1073 1109 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1074 1110 fm.formatlist(sorted(e.name() for e in compengines),
1075 1111 name='compengine', fmt='%s', sep=', '))
1076 1112 fm.write('compenginesavail', _('checking available compression engines '
1077 1113 '(%s)\n'),
1078 1114 fm.formatlist(sorted(e.name() for e in compengines
1079 1115 if e.available()),
1080 1116 name='compengine', fmt='%s', sep=', '))
1081 1117 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1082 1118 fm.write('compenginesserver', _('checking available compression engines '
1083 1119 'for wire protocol (%s)\n'),
1084 1120 fm.formatlist([e.name() for e in wirecompengines
1085 1121 if e.wireprotosupport()],
1086 1122 name='compengine', fmt='%s', sep=', '))
1087 1123
1088 1124 # templates
1089 1125 p = templater.templatepaths()
1090 1126 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1091 1127 fm.condwrite(not p, '', _(" no template directories found\n"))
1092 1128 if p:
1093 1129 m = templater.templatepath("map-cmdline.default")
1094 1130 if m:
1095 1131 # template found, check if it is working
1096 1132 err = None
1097 1133 try:
1098 1134 templater.templater.frommapfile(m)
1099 1135 except Exception as inst:
1100 1136 err = util.forcebytestr(inst)
1101 1137 p = None
1102 1138 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1103 1139 else:
1104 1140 p = None
1105 1141 fm.condwrite(p, 'defaulttemplate',
1106 1142 _("checking default template (%s)\n"), m)
1107 1143 fm.condwrite(not m, 'defaulttemplatenotfound',
1108 1144 _(" template '%s' not found\n"), "default")
1109 1145 if not p:
1110 1146 problems += 1
1111 1147 fm.condwrite(not p, '',
1112 1148 _(" (templates seem to have been installed incorrectly)\n"))
1113 1149
1114 1150 # editor
1115 1151 editor = ui.geteditor()
1116 1152 editor = util.expandpath(editor)
1117 1153 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1118 1154 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1119 1155 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1120 1156 _(" No commit editor set and can't find %s in PATH\n"
1121 1157 " (specify a commit editor in your configuration"
1122 1158 " file)\n"), not cmdpath and editor == 'vi' and editor)
1123 1159 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1124 1160 _(" Can't find editor '%s' in PATH\n"
1125 1161 " (specify a commit editor in your configuration"
1126 1162 " file)\n"), not cmdpath and editor)
1127 1163 if not cmdpath and editor != 'vi':
1128 1164 problems += 1
1129 1165
1130 1166 # check username
1131 1167 username = None
1132 1168 err = None
1133 1169 try:
1134 1170 username = ui.username()
1135 1171 except error.Abort as e:
1136 1172 err = util.forcebytestr(e)
1137 1173 problems += 1
1138 1174
1139 1175 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1140 1176 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1141 1177 " (specify a username in your configuration file)\n"), err)
1142 1178
1143 1179 fm.condwrite(not problems, '',
1144 1180 _("no problems detected\n"))
1145 1181 if not problems:
1146 1182 fm.data(problems=problems)
1147 1183 fm.condwrite(problems, 'problems',
1148 1184 _("%d problems detected,"
1149 1185 " please check your install!\n"), problems)
1150 1186 fm.end()
1151 1187
1152 1188 return problems
1153 1189
1154 1190 @command('debugknown', [], _('REPO ID...'), norepo=True)
1155 1191 def debugknown(ui, repopath, *ids, **opts):
1156 1192 """test whether node ids are known to a repo
1157 1193
1158 1194 Every ID must be a full-length hex node id string. Returns a list of 0s
1159 1195 and 1s indicating unknown/known.
1160 1196 """
1161 1197 opts = pycompat.byteskwargs(opts)
1162 1198 repo = hg.peer(ui, opts, repopath)
1163 1199 if not repo.capable('known'):
1164 1200 raise error.Abort("known() not supported by target repository")
1165 1201 flags = repo.known([bin(s) for s in ids])
1166 1202 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1167 1203
1168 1204 @command('debuglabelcomplete', [], _('LABEL...'))
1169 1205 def debuglabelcomplete(ui, repo, *args):
1170 1206 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1171 1207 debugnamecomplete(ui, repo, *args)
1172 1208
1173 1209 @command('debuglocks',
1174 1210 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1175 1211 ('W', 'force-wlock', None,
1176 1212 _('free the working state lock (DANGEROUS)'))],
1177 1213 _('[OPTION]...'))
1178 1214 def debuglocks(ui, repo, **opts):
1179 1215 """show or modify state of locks
1180 1216
1181 1217 By default, this command will show which locks are held. This
1182 1218 includes the user and process holding the lock, the amount of time
1183 1219 the lock has been held, and the machine name where the process is
1184 1220 running if it's not local.
1185 1221
1186 1222 Locks protect the integrity of Mercurial's data, so should be
1187 1223 treated with care. System crashes or other interruptions may cause
1188 1224 locks to not be properly released, though Mercurial will usually
1189 1225 detect and remove such stale locks automatically.
1190 1226
1191 1227 However, detecting stale locks may not always be possible (for
1192 1228 instance, on a shared filesystem). Removing locks may also be
1193 1229 blocked by filesystem permissions.
1194 1230
1195 1231 Returns 0 if no locks are held.
1196 1232
1197 1233 """
1198 1234
1199 1235 if opts.get(r'force_lock'):
1200 1236 repo.svfs.unlink('lock')
1201 1237 if opts.get(r'force_wlock'):
1202 1238 repo.vfs.unlink('wlock')
1203 1239 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1204 1240 return 0
1205 1241
1206 1242 now = time.time()
1207 1243 held = 0
1208 1244
1209 1245 def report(vfs, name, method):
1210 1246 # this causes stale locks to get reaped for more accurate reporting
1211 1247 try:
1212 1248 l = method(False)
1213 1249 except error.LockHeld:
1214 1250 l = None
1215 1251
1216 1252 if l:
1217 1253 l.release()
1218 1254 else:
1219 1255 try:
1220 1256 stat = vfs.lstat(name)
1221 1257 age = now - stat.st_mtime
1222 1258 user = util.username(stat.st_uid)
1223 1259 locker = vfs.readlock(name)
1224 1260 if ":" in locker:
1225 1261 host, pid = locker.split(':')
1226 1262 if host == socket.gethostname():
1227 1263 locker = 'user %s, process %s' % (user, pid)
1228 1264 else:
1229 1265 locker = 'user %s, process %s, host %s' \
1230 1266 % (user, pid, host)
1231 1267 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1232 1268 return 1
1233 1269 except OSError as e:
1234 1270 if e.errno != errno.ENOENT:
1235 1271 raise
1236 1272
1237 1273 ui.write(("%-6s free\n") % (name + ":"))
1238 1274 return 0
1239 1275
1240 1276 held += report(repo.svfs, "lock", repo.lock)
1241 1277 held += report(repo.vfs, "wlock", repo.wlock)
1242 1278
1243 1279 return held
1244 1280
1245 1281 @command('debugmergestate', [], '')
1246 1282 def debugmergestate(ui, repo, *args):
1247 1283 """print merge state
1248 1284
1249 1285 Use --verbose to print out information about whether v1 or v2 merge state
1250 1286 was chosen."""
1251 1287 def _hashornull(h):
1252 1288 if h == nullhex:
1253 1289 return 'null'
1254 1290 else:
1255 1291 return h
1256 1292
1257 1293 def printrecords(version):
1258 1294 ui.write(('* version %s records\n') % version)
1259 1295 if version == 1:
1260 1296 records = v1records
1261 1297 else:
1262 1298 records = v2records
1263 1299
1264 1300 for rtype, record in records:
1265 1301 # pretty print some record types
1266 1302 if rtype == 'L':
1267 1303 ui.write(('local: %s\n') % record)
1268 1304 elif rtype == 'O':
1269 1305 ui.write(('other: %s\n') % record)
1270 1306 elif rtype == 'm':
1271 1307 driver, mdstate = record.split('\0', 1)
1272 1308 ui.write(('merge driver: %s (state "%s")\n')
1273 1309 % (driver, mdstate))
1274 1310 elif rtype in 'FDC':
1275 1311 r = record.split('\0')
1276 1312 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1277 1313 if version == 1:
1278 1314 onode = 'not stored in v1 format'
1279 1315 flags = r[7]
1280 1316 else:
1281 1317 onode, flags = r[7:9]
1282 1318 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1283 1319 % (f, rtype, state, _hashornull(hash)))
1284 1320 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1285 1321 ui.write((' ancestor path: %s (node %s)\n')
1286 1322 % (afile, _hashornull(anode)))
1287 1323 ui.write((' other path: %s (node %s)\n')
1288 1324 % (ofile, _hashornull(onode)))
1289 1325 elif rtype == 'f':
1290 1326 filename, rawextras = record.split('\0', 1)
1291 1327 extras = rawextras.split('\0')
1292 1328 i = 0
1293 1329 extrastrings = []
1294 1330 while i < len(extras):
1295 1331 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1296 1332 i += 2
1297 1333
1298 1334 ui.write(('file extras: %s (%s)\n')
1299 1335 % (filename, ', '.join(extrastrings)))
1300 1336 elif rtype == 'l':
1301 1337 labels = record.split('\0', 2)
1302 1338 labels = [l for l in labels if len(l) > 0]
1303 1339 ui.write(('labels:\n'))
1304 1340 ui.write((' local: %s\n' % labels[0]))
1305 1341 ui.write((' other: %s\n' % labels[1]))
1306 1342 if len(labels) > 2:
1307 1343 ui.write((' base: %s\n' % labels[2]))
1308 1344 else:
1309 1345 ui.write(('unrecognized entry: %s\t%s\n')
1310 1346 % (rtype, record.replace('\0', '\t')))
1311 1347
1312 1348 # Avoid mergestate.read() since it may raise an exception for unsupported
1313 1349 # merge state records. We shouldn't be doing this, but this is OK since this
1314 1350 # command is pretty low-level.
1315 1351 ms = mergemod.mergestate(repo)
1316 1352
1317 1353 # sort so that reasonable information is on top
1318 1354 v1records = ms._readrecordsv1()
1319 1355 v2records = ms._readrecordsv2()
1320 1356 order = 'LOml'
1321 1357 def key(r):
1322 1358 idx = order.find(r[0])
1323 1359 if idx == -1:
1324 1360 return (1, r[1])
1325 1361 else:
1326 1362 return (0, idx)
1327 1363 v1records.sort(key=key)
1328 1364 v2records.sort(key=key)
1329 1365
1330 1366 if not v1records and not v2records:
1331 1367 ui.write(('no merge state found\n'))
1332 1368 elif not v2records:
1333 1369 ui.note(('no version 2 merge state\n'))
1334 1370 printrecords(1)
1335 1371 elif ms._v1v2match(v1records, v2records):
1336 1372 ui.note(('v1 and v2 states match: using v2\n'))
1337 1373 printrecords(2)
1338 1374 else:
1339 1375 ui.note(('v1 and v2 states mismatch: using v1\n'))
1340 1376 printrecords(1)
1341 1377 if ui.verbose:
1342 1378 printrecords(2)
1343 1379
1344 1380 @command('debugnamecomplete', [], _('NAME...'))
1345 1381 def debugnamecomplete(ui, repo, *args):
1346 1382 '''complete "names" - tags, open branch names, bookmark names'''
1347 1383
1348 1384 names = set()
1349 1385 # since we previously only listed open branches, we will handle that
1350 1386 # specially (after this for loop)
1351 1387 for name, ns in repo.names.iteritems():
1352 1388 if name != 'branches':
1353 1389 names.update(ns.listnames(repo))
1354 1390 names.update(tag for (tag, heads, tip, closed)
1355 1391 in repo.branchmap().iterbranches() if not closed)
1356 1392 completions = set()
1357 1393 if not args:
1358 1394 args = ['']
1359 1395 for a in args:
1360 1396 completions.update(n for n in names if n.startswith(a))
1361 1397 ui.write('\n'.join(sorted(completions)))
1362 1398 ui.write('\n')
1363 1399
1364 1400 @command('debugobsolete',
1365 1401 [('', 'flags', 0, _('markers flag')),
1366 1402 ('', 'record-parents', False,
1367 1403 _('record parent information for the precursor')),
1368 1404 ('r', 'rev', [], _('display markers relevant to REV')),
1369 1405 ('', 'exclusive', False, _('restrict display to markers only '
1370 1406 'relevant to REV')),
1371 1407 ('', 'index', False, _('display index of the marker')),
1372 1408 ('', 'delete', [], _('delete markers specified by indices')),
1373 1409 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1374 1410 _('[OBSOLETED [REPLACEMENT ...]]'))
1375 1411 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1376 1412 """create arbitrary obsolete marker
1377 1413
1378 1414 With no arguments, displays the list of obsolescence markers."""
1379 1415
1380 1416 opts = pycompat.byteskwargs(opts)
1381 1417
1382 1418 def parsenodeid(s):
1383 1419 try:
1384 1420 # We do not use revsingle/revrange functions here to accept
1385 1421 # arbitrary node identifiers, possibly not present in the
1386 1422 # local repository.
1387 1423 n = bin(s)
1388 1424 if len(n) != len(nullid):
1389 1425 raise TypeError()
1390 1426 return n
1391 1427 except TypeError:
1392 1428 raise error.Abort('changeset references must be full hexadecimal '
1393 1429 'node identifiers')
1394 1430
1395 1431 if opts.get('delete'):
1396 1432 indices = []
1397 1433 for v in opts.get('delete'):
1398 1434 try:
1399 1435 indices.append(int(v))
1400 1436 except ValueError:
1401 1437 raise error.Abort(_('invalid index value: %r') % v,
1402 1438 hint=_('use integers for indices'))
1403 1439
1404 1440 if repo.currenttransaction():
1405 1441 raise error.Abort(_('cannot delete obsmarkers in the middle '
1406 1442 'of transaction.'))
1407 1443
1408 1444 with repo.lock():
1409 1445 n = repair.deleteobsmarkers(repo.obsstore, indices)
1410 1446 ui.write(_('deleted %i obsolescence markers\n') % n)
1411 1447
1412 1448 return
1413 1449
1414 1450 if precursor is not None:
1415 1451 if opts['rev']:
1416 1452 raise error.Abort('cannot select revision when creating marker')
1417 1453 metadata = {}
1418 1454 metadata['user'] = opts['user'] or ui.username()
1419 1455 succs = tuple(parsenodeid(succ) for succ in successors)
1420 1456 l = repo.lock()
1421 1457 try:
1422 1458 tr = repo.transaction('debugobsolete')
1423 1459 try:
1424 1460 date = opts.get('date')
1425 1461 if date:
1426 1462 date = util.parsedate(date)
1427 1463 else:
1428 1464 date = None
1429 1465 prec = parsenodeid(precursor)
1430 1466 parents = None
1431 1467 if opts['record_parents']:
1432 1468 if prec not in repo.unfiltered():
1433 1469 raise error.Abort('cannot use --record-parents on '
1434 1470 'unknown changesets')
1435 1471 parents = repo.unfiltered()[prec].parents()
1436 1472 parents = tuple(p.node() for p in parents)
1437 1473 repo.obsstore.create(tr, prec, succs, opts['flags'],
1438 1474 parents=parents, date=date,
1439 1475 metadata=metadata, ui=ui)
1440 1476 tr.close()
1441 1477 except ValueError as exc:
1442 1478 raise error.Abort(_('bad obsmarker input: %s') % exc)
1443 1479 finally:
1444 1480 tr.release()
1445 1481 finally:
1446 1482 l.release()
1447 1483 else:
1448 1484 if opts['rev']:
1449 1485 revs = scmutil.revrange(repo, opts['rev'])
1450 1486 nodes = [repo[r].node() for r in revs]
1451 1487 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1452 1488 exclusive=opts['exclusive']))
1453 1489 markers.sort(key=lambda x: x._data)
1454 1490 else:
1455 1491 markers = obsutil.getmarkers(repo)
1456 1492
1457 1493 markerstoiter = markers
1458 1494 isrelevant = lambda m: True
1459 1495 if opts.get('rev') and opts.get('index'):
1460 1496 markerstoiter = obsutil.getmarkers(repo)
1461 1497 markerset = set(markers)
1462 1498 isrelevant = lambda m: m in markerset
1463 1499
1464 1500 fm = ui.formatter('debugobsolete', opts)
1465 1501 for i, m in enumerate(markerstoiter):
1466 1502 if not isrelevant(m):
1467 1503 # marker can be irrelevant when we're iterating over a set
1468 1504 # of markers (markerstoiter) which is bigger than the set
1469 1505 # of markers we want to display (markers)
1470 1506 # this can happen if both --index and --rev options are
1471 1507 # provided and thus we need to iterate over all of the markers
1472 1508 # to get the correct indices, but only display the ones that
1473 1509 # are relevant to --rev value
1474 1510 continue
1475 1511 fm.startitem()
1476 1512 ind = i if opts.get('index') else None
1477 1513 cmdutil.showmarker(fm, m, index=ind)
1478 1514 fm.end()
1479 1515
1480 1516 @command('debugpathcomplete',
1481 1517 [('f', 'full', None, _('complete an entire path')),
1482 1518 ('n', 'normal', None, _('show only normal files')),
1483 1519 ('a', 'added', None, _('show only added files')),
1484 1520 ('r', 'removed', None, _('show only removed files'))],
1485 1521 _('FILESPEC...'))
1486 1522 def debugpathcomplete(ui, repo, *specs, **opts):
1487 1523 '''complete part or all of a tracked path
1488 1524
1489 1525 This command supports shells that offer path name completion. It
1490 1526 currently completes only files already known to the dirstate.
1491 1527
1492 1528 Completion extends only to the next path segment unless
1493 1529 --full is specified, in which case entire paths are used.'''
1494 1530
1495 1531 def complete(path, acceptable):
1496 1532 dirstate = repo.dirstate
1497 1533 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1498 1534 rootdir = repo.root + pycompat.ossep
1499 1535 if spec != repo.root and not spec.startswith(rootdir):
1500 1536 return [], []
1501 1537 if os.path.isdir(spec):
1502 1538 spec += '/'
1503 1539 spec = spec[len(rootdir):]
1504 1540 fixpaths = pycompat.ossep != '/'
1505 1541 if fixpaths:
1506 1542 spec = spec.replace(pycompat.ossep, '/')
1507 1543 speclen = len(spec)
1508 1544 fullpaths = opts[r'full']
1509 1545 files, dirs = set(), set()
1510 1546 adddir, addfile = dirs.add, files.add
1511 1547 for f, st in dirstate.iteritems():
1512 1548 if f.startswith(spec) and st[0] in acceptable:
1513 1549 if fixpaths:
1514 1550 f = f.replace('/', pycompat.ossep)
1515 1551 if fullpaths:
1516 1552 addfile(f)
1517 1553 continue
1518 1554 s = f.find(pycompat.ossep, speclen)
1519 1555 if s >= 0:
1520 1556 adddir(f[:s])
1521 1557 else:
1522 1558 addfile(f)
1523 1559 return files, dirs
1524 1560
1525 1561 acceptable = ''
1526 1562 if opts[r'normal']:
1527 1563 acceptable += 'nm'
1528 1564 if opts[r'added']:
1529 1565 acceptable += 'a'
1530 1566 if opts[r'removed']:
1531 1567 acceptable += 'r'
1532 1568 cwd = repo.getcwd()
1533 1569 if not specs:
1534 1570 specs = ['.']
1535 1571
1536 1572 files, dirs = set(), set()
1537 1573 for spec in specs:
1538 1574 f, d = complete(spec, acceptable or 'nmar')
1539 1575 files.update(f)
1540 1576 dirs.update(d)
1541 1577 files.update(dirs)
1542 1578 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1543 1579 ui.write('\n')
1544 1580
1545 1581 @command('debugpickmergetool',
1546 1582 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1547 1583 ('', 'changedelete', None, _('emulate merging change and delete')),
1548 1584 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1549 1585 _('[PATTERN]...'),
1550 1586 inferrepo=True)
1551 1587 def debugpickmergetool(ui, repo, *pats, **opts):
1552 1588 """examine which merge tool is chosen for specified file
1553 1589
1554 1590 As described in :hg:`help merge-tools`, Mercurial examines
1555 1591 configurations below in this order to decide which merge tool is
1556 1592 chosen for specified file.
1557 1593
1558 1594 1. ``--tool`` option
1559 1595 2. ``HGMERGE`` environment variable
1560 1596 3. configurations in ``merge-patterns`` section
1561 1597 4. configuration of ``ui.merge``
1562 1598 5. configurations in ``merge-tools`` section
1563 1599 6. ``hgmerge`` tool (for historical reasons only)
1564 1600 7. default tool for fallback (``:merge`` or ``:prompt``)
1565 1601
1566 1602 This command writes out the examination result in the style below::
1567 1603
1568 1604 FILE = MERGETOOL
1569 1605
1570 1606 By default, all files known in the first parent context of the
1571 1607 working directory are examined. Use file patterns and/or -I/-X
1572 1608 options to limit target files. -r/--rev is also useful to examine
1573 1609 files in another context without actually updating to it.
1574 1610
1575 1611 With --debug, this command also shows warning messages while matching
1576 1612 against ``merge-patterns`` and so on. It is recommended to
1577 1613 use this option with explicit file patterns and/or -I/-X options,
1578 1614 because this option increases the amount of output per file according
1579 1615 to the configurations in hgrc.
1580 1616
1581 1617 With -v/--verbose, this command first shows the configurations
1582 1618 below (only if specified).
1583 1619
1584 1620 - ``--tool`` option
1585 1621 - ``HGMERGE`` environment variable
1586 1622 - configuration of ``ui.merge``
1587 1623
1588 1624 If a merge tool is chosen before matching against
1589 1625 ``merge-patterns``, this command can't show any helpful
1590 1626 information, even with --debug. In such a case, the information
1591 1627 above is useful for understanding why a merge tool was chosen.
1592 1628 """
1593 1629 opts = pycompat.byteskwargs(opts)
1594 1630 overrides = {}
1595 1631 if opts['tool']:
1596 1632 overrides[('ui', 'forcemerge')] = opts['tool']
1597 1633 ui.note(('with --tool %r\n') % (opts['tool']))
1598 1634
1599 1635 with ui.configoverride(overrides, 'debugmergepatterns'):
1600 1636 hgmerge = encoding.environ.get("HGMERGE")
1601 1637 if hgmerge is not None:
1602 1638 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1603 1639 uimerge = ui.config("ui", "merge")
1604 1640 if uimerge:
1605 1641 ui.note(('with ui.merge=%r\n') % (uimerge))
1606 1642
1607 1643 ctx = scmutil.revsingle(repo, opts.get('rev'))
1608 1644 m = scmutil.match(ctx, pats, opts)
1609 1645 changedelete = opts['changedelete']
1610 1646 for path in ctx.walk(m):
1611 1647 fctx = ctx[path]
1612 1648 try:
1613 1649 if not ui.debugflag:
1614 1650 ui.pushbuffer(error=True)
1615 1651 tool, toolpath = filemerge._picktool(repo, ui, path,
1616 1652 fctx.isbinary(),
1617 1653 'l' in fctx.flags(),
1618 1654 changedelete)
1619 1655 finally:
1620 1656 if not ui.debugflag:
1621 1657 ui.popbuffer()
1622 1658 ui.write(('%s = %s\n') % (path, tool))
1623 1659
1624 1660 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1625 1661 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1626 1662 '''access the pushkey key/value protocol
1627 1663
1628 1664 With two args, list the keys in the given namespace.
1629 1665
1630 1666 With five args, set a key to new if it currently is set to old.
1631 1667 Reports success or failure.
1632 1668 '''
1633 1669
1634 1670 target = hg.peer(ui, {}, repopath)
1635 1671 if keyinfo:
1636 1672 key, old, new = keyinfo
1637 1673 r = target.pushkey(namespace, key, old, new)
1638 1674 ui.status(str(r) + '\n')
1639 1675 return not r
1640 1676 else:
1641 1677 for k, v in sorted(target.listkeys(namespace).iteritems()):
1642 1678 ui.write("%s\t%s\n" % (util.escapestr(k),
1643 1679 util.escapestr(v)))
1644 1680
1645 1681 @command('debugpvec', [], _('A B'))
1646 1682 def debugpvec(ui, repo, a, b=None):
1647 1683 ca = scmutil.revsingle(repo, a)
1648 1684 cb = scmutil.revsingle(repo, b)
1649 1685 pa = pvec.ctxpvec(ca)
1650 1686 pb = pvec.ctxpvec(cb)
1651 1687 if pa == pb:
1652 1688 rel = "="
1653 1689 elif pa > pb:
1654 1690 rel = ">"
1655 1691 elif pa < pb:
1656 1692 rel = "<"
1657 1693 elif pa | pb:
1658 1694 rel = "|"
1659 1695 ui.write(_("a: %s\n") % pa)
1660 1696 ui.write(_("b: %s\n") % pb)
1661 1697 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1662 1698 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1663 1699 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1664 1700 pa.distance(pb), rel))
1665 1701
1666 1702 @command('debugrebuilddirstate|debugrebuildstate',
1667 1703 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1668 1704 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1669 1705 'the working copy parent')),
1670 1706 ],
1671 1707 _('[-r REV]'))
1672 1708 def debugrebuilddirstate(ui, repo, rev, **opts):
1673 1709 """rebuild the dirstate as it would look like for the given revision
1674 1710
1675 1711 If no revision is specified, the first parent of the working directory will be used.
1676 1712
1677 1713 The dirstate will be set to the files of the given revision.
1678 1714 The actual working directory content or existing dirstate
1679 1715 information such as adds or removes is not considered.
1680 1716
1681 1717 ``minimal`` will only rebuild the dirstate status for files that claim to be
1682 1718 tracked but are not in the parent manifest, or that exist in the parent
1683 1719 manifest but are not in the dirstate. It will not change adds, removes, or
1684 1720 modified files that are in the working copy parent.
1685 1721
1686 1722 One use of this command is to make the next :hg:`status` invocation
1687 1723 check the actual file content.
1688 1724 """
1689 1725 ctx = scmutil.revsingle(repo, rev)
1690 1726 with repo.wlock():
1691 1727 dirstate = repo.dirstate
1692 1728 changedfiles = None
1693 1729 # See command doc for what minimal does.
1694 1730 if opts.get(r'minimal'):
1695 1731 manifestfiles = set(ctx.manifest().keys())
1696 1732 dirstatefiles = set(dirstate)
1697 1733 manifestonly = manifestfiles - dirstatefiles
1698 1734 dsonly = dirstatefiles - manifestfiles
1699 1735 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1700 1736 changedfiles = manifestonly | dsnotadded
1701 1737
1702 1738 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1703 1739
1704 1740 @command('debugrebuildfncache', [], '')
1705 1741 def debugrebuildfncache(ui, repo):
1706 1742 """rebuild the fncache file"""
1707 1743 repair.rebuildfncache(ui, repo)
1708 1744
1709 1745 @command('debugrename',
1710 1746 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1711 1747 _('[-r REV] FILE'))
1712 1748 def debugrename(ui, repo, file1, *pats, **opts):
1713 1749 """dump rename information"""
1714 1750
1715 1751 opts = pycompat.byteskwargs(opts)
1716 1752 ctx = scmutil.revsingle(repo, opts.get('rev'))
1717 1753 m = scmutil.match(ctx, (file1,) + pats, opts)
1718 1754 for abs in ctx.walk(m):
1719 1755 fctx = ctx[abs]
1720 1756 o = fctx.filelog().renamed(fctx.filenode())
1721 1757 rel = m.rel(abs)
1722 1758 if o:
1723 1759 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1724 1760 else:
1725 1761 ui.write(_("%s not renamed\n") % rel)
1726 1762
1727 1763 @command('debugrevlog', cmdutil.debugrevlogopts +
1728 1764 [('d', 'dump', False, _('dump index data'))],
1729 1765 _('-c|-m|FILE'),
1730 1766 optionalrepo=True)
1731 1767 def debugrevlog(ui, repo, file_=None, **opts):
1732 1768 """show data and statistics about a revlog"""
1733 1769 opts = pycompat.byteskwargs(opts)
1734 1770 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1735 1771
1736 1772 if opts.get("dump"):
1737 1773 numrevs = len(r)
1738 1774 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1739 1775 " rawsize totalsize compression heads chainlen\n"))
1740 1776 ts = 0
1741 1777 heads = set()
1742 1778
1743 1779 for rev in xrange(numrevs):
1744 1780 dbase = r.deltaparent(rev)
1745 1781 if dbase == -1:
1746 1782 dbase = rev
1747 1783 cbase = r.chainbase(rev)
1748 1784 clen = r.chainlen(rev)
1749 1785 p1, p2 = r.parentrevs(rev)
1750 1786 rs = r.rawsize(rev)
1751 1787 ts = ts + rs
1752 1788 heads -= set(r.parentrevs(rev))
1753 1789 heads.add(rev)
1754 1790 try:
1755 1791 compression = ts / r.end(rev)
1756 1792 except ZeroDivisionError:
1757 1793 compression = 0
1758 1794 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1759 1795 "%11d %5d %8d\n" %
1760 1796 (rev, p1, p2, r.start(rev), r.end(rev),
1761 1797 r.start(dbase), r.start(cbase),
1762 1798 r.start(p1), r.start(p2),
1763 1799 rs, ts, compression, len(heads), clen))
1764 1800 return 0
1765 1801
1766 1802 v = r.version
1767 1803 format = v & 0xFFFF
1768 1804 flags = []
1769 1805 gdelta = False
1770 1806 if v & revlog.FLAG_INLINE_DATA:
1771 1807 flags.append('inline')
1772 1808 if v & revlog.FLAG_GENERALDELTA:
1773 1809 gdelta = True
1774 1810 flags.append('generaldelta')
1775 1811 if not flags:
1776 1812 flags = ['(none)']
1777 1813
1778 1814 nummerges = 0
1779 1815 numfull = 0
1780 1816 numprev = 0
1781 1817 nump1 = 0
1782 1818 nump2 = 0
1783 1819 numother = 0
1784 1820 nump1prev = 0
1785 1821 nump2prev = 0
1786 1822 chainlengths = []
1787 1823 chainbases = []
1788 1824 chainspans = []
1789 1825
1790 1826 datasize = [None, 0, 0]
1791 1827 fullsize = [None, 0, 0]
1792 1828 deltasize = [None, 0, 0]
1793 1829 chunktypecounts = {}
1794 1830 chunktypesizes = {}
1795 1831
1796 1832 def addsize(size, l):
1797 1833 if l[0] is None or size < l[0]:
1798 1834 l[0] = size
1799 1835 if size > l[1]:
1800 1836 l[1] = size
1801 1837 l[2] += size
1802 1838
1803 1839 numrevs = len(r)
1804 1840 for rev in xrange(numrevs):
1805 1841 p1, p2 = r.parentrevs(rev)
1806 1842 delta = r.deltaparent(rev)
1807 1843 if format > 0:
1808 1844 addsize(r.rawsize(rev), datasize)
1809 1845 if p2 != nullrev:
1810 1846 nummerges += 1
1811 1847 size = r.length(rev)
1812 1848 if delta == nullrev:
1813 1849 chainlengths.append(0)
1814 1850 chainbases.append(r.start(rev))
1815 1851 chainspans.append(size)
1816 1852 numfull += 1
1817 1853 addsize(size, fullsize)
1818 1854 else:
1819 1855 chainlengths.append(chainlengths[delta] + 1)
1820 1856 baseaddr = chainbases[delta]
1821 1857 revaddr = r.start(rev)
1822 1858 chainbases.append(baseaddr)
1823 1859 chainspans.append((revaddr - baseaddr) + size)
1824 1860 addsize(size, deltasize)
1825 1861 if delta == rev - 1:
1826 1862 numprev += 1
1827 1863 if delta == p1:
1828 1864 nump1prev += 1
1829 1865 elif delta == p2:
1830 1866 nump2prev += 1
1831 1867 elif delta == p1:
1832 1868 nump1 += 1
1833 1869 elif delta == p2:
1834 1870 nump2 += 1
1835 1871 elif delta != nullrev:
1836 1872 numother += 1
1837 1873
1838 1874 # Obtain data on the raw chunks in the revlog.
1839 1875 segment = r._getsegmentforrevs(rev, rev)[1]
1840 1876 if segment:
1841 1877 chunktype = bytes(segment[0:1])
1842 1878 else:
1843 1879 chunktype = 'empty'
1844 1880
1845 1881 if chunktype not in chunktypecounts:
1846 1882 chunktypecounts[chunktype] = 0
1847 1883 chunktypesizes[chunktype] = 0
1848 1884
1849 1885 chunktypecounts[chunktype] += 1
1850 1886 chunktypesizes[chunktype] += size
1851 1887
1852 1888 # Adjust size min value for empty cases
1853 1889 for size in (datasize, fullsize, deltasize):
1854 1890 if size[0] is None:
1855 1891 size[0] = 0
1856 1892
1857 1893 numdeltas = numrevs - numfull
1858 1894 numoprev = numprev - nump1prev - nump2prev
1859 1895 totalrawsize = datasize[2]
1860 1896 datasize[2] /= numrevs
1861 1897 fulltotal = fullsize[2]
1862 1898 fullsize[2] /= numfull
1863 1899 deltatotal = deltasize[2]
1864 1900 if numrevs - numfull > 0:
1865 1901 deltasize[2] /= numrevs - numfull
1866 1902 totalsize = fulltotal + deltatotal
1867 1903 avgchainlen = sum(chainlengths) / numrevs
1868 1904 maxchainlen = max(chainlengths)
1869 1905 maxchainspan = max(chainspans)
1870 1906 compratio = 1
1871 1907 if totalsize:
1872 1908 compratio = totalrawsize / totalsize
1873 1909
1874 1910 basedfmtstr = '%%%dd\n'
1875 1911 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1876 1912
1877 1913 def dfmtstr(max):
1878 1914 return basedfmtstr % len(str(max))
1879 1915 def pcfmtstr(max, padding=0):
1880 1916 return basepcfmtstr % (len(str(max)), ' ' * padding)
1881 1917
1882 1918 def pcfmt(value, total):
1883 1919 if total:
1884 1920 return (value, 100 * float(value) / total)
1885 1921 else:
1886 1922 return value, 100.0
1887 1923
1888 1924 ui.write(('format : %d\n') % format)
1889 1925 ui.write(('flags : %s\n') % ', '.join(flags))
1890 1926
1891 1927 ui.write('\n')
1892 1928 fmt = pcfmtstr(totalsize)
1893 1929 fmt2 = dfmtstr(totalsize)
1894 1930 ui.write(('revisions : ') + fmt2 % numrevs)
1895 1931 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1896 1932 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1897 1933 ui.write(('revisions : ') + fmt2 % numrevs)
1898 1934 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1899 1935 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1900 1936 ui.write(('revision size : ') + fmt2 % totalsize)
1901 1937 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1902 1938 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1903 1939
1904 1940 def fmtchunktype(chunktype):
1905 1941 if chunktype == 'empty':
1906 1942 return ' %s : ' % chunktype
1907 1943 elif chunktype in pycompat.bytestr(string.ascii_letters):
1908 1944 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1909 1945 else:
1910 1946 return ' 0x%s : ' % hex(chunktype)
1911 1947
1912 1948 ui.write('\n')
1913 1949 ui.write(('chunks : ') + fmt2 % numrevs)
1914 1950 for chunktype in sorted(chunktypecounts):
1915 1951 ui.write(fmtchunktype(chunktype))
1916 1952 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1917 1953 ui.write(('chunks size : ') + fmt2 % totalsize)
1918 1954 for chunktype in sorted(chunktypecounts):
1919 1955 ui.write(fmtchunktype(chunktype))
1920 1956 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1921 1957
1922 1958 ui.write('\n')
1923 1959 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1924 1960 ui.write(('avg chain length : ') + fmt % avgchainlen)
1925 1961 ui.write(('max chain length : ') + fmt % maxchainlen)
1926 1962 ui.write(('max chain reach : ') + fmt % maxchainspan)
1927 1963 ui.write(('compression ratio : ') + fmt % compratio)
1928 1964
1929 1965 if format > 0:
1930 1966 ui.write('\n')
1931 1967 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1932 1968 % tuple(datasize))
1933 1969 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1934 1970 % tuple(fullsize))
1935 1971 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1936 1972 % tuple(deltasize))
1937 1973
1938 1974 if numdeltas > 0:
1939 1975 ui.write('\n')
1940 1976 fmt = pcfmtstr(numdeltas)
1941 1977 fmt2 = pcfmtstr(numdeltas, 4)
1942 1978 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1943 1979 if numprev > 0:
1944 1980 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1945 1981 numprev))
1946 1982 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1947 1983 numprev))
1948 1984 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1949 1985 numprev))
1950 1986 if gdelta:
1951 1987 ui.write(('deltas against p1 : ')
1952 1988 + fmt % pcfmt(nump1, numdeltas))
1953 1989 ui.write(('deltas against p2 : ')
1954 1990 + fmt % pcfmt(nump2, numdeltas))
1955 1991 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1956 1992 numdeltas))
1957 1993
1958 1994 @command('debugrevspec',
1959 1995 [('', 'optimize', None,
1960 1996 _('print parsed tree after optimizing (DEPRECATED)')),
1961 1997 ('', 'show-revs', True, _('print list of result revisions (default)')),
1962 1998 ('s', 'show-set', None, _('print internal representation of result set')),
1963 1999 ('p', 'show-stage', [],
1964 2000 _('print parsed tree at the given stage'), _('NAME')),
1965 2001 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1966 2002 ('', 'verify-optimized', False, _('verify optimized result')),
1967 2003 ],
1968 2004 ('REVSPEC'))
1969 2005 def debugrevspec(ui, repo, expr, **opts):
1970 2006 """parse and apply a revision specification
1971 2007
1972 2008 Use the -p/--show-stage option to print the parsed tree at the given stages.
1973 2009 Use -p all to print the tree at every stage.
1974 2010
1975 2011 Use the --no-show-revs option with -s or -p to print only the set
1976 2012 representation or the parsed tree respectively.
1977 2013
1978 2014 Use --verify-optimized to compare the optimized result with the unoptimized
1979 2015 one. Returns 1 if the optimized result differs.
1980 2016 """
1981 2017 opts = pycompat.byteskwargs(opts)
1982 2018 aliases = ui.configitems('revsetalias')
1983 2019 stages = [
1984 2020 ('parsed', lambda tree: tree),
1985 2021 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
1986 2022 ui.warn)),
1987 2023 ('concatenated', revsetlang.foldconcat),
1988 2024 ('analyzed', revsetlang.analyze),
1989 2025 ('optimized', revsetlang.optimize),
1990 2026 ]
1991 2027 if opts['no_optimized']:
1992 2028 stages = stages[:-1]
1993 2029 if opts['verify_optimized'] and opts['no_optimized']:
1994 2030 raise error.Abort(_('cannot use --verify-optimized with '
1995 2031 '--no-optimized'))
1996 2032 stagenames = set(n for n, f in stages)
1997 2033
1998 2034 showalways = set()
1999 2035 showchanged = set()
2000 2036 if ui.verbose and not opts['show_stage']:
2001 2037 # show parsed tree by --verbose (deprecated)
2002 2038 showalways.add('parsed')
2003 2039 showchanged.update(['expanded', 'concatenated'])
2004 2040 if opts['optimize']:
2005 2041 showalways.add('optimized')
2006 2042 if opts['show_stage'] and opts['optimize']:
2007 2043 raise error.Abort(_('cannot use --optimize with --show-stage'))
2008 2044 if opts['show_stage'] == ['all']:
2009 2045 showalways.update(stagenames)
2010 2046 else:
2011 2047 for n in opts['show_stage']:
2012 2048 if n not in stagenames:
2013 2049 raise error.Abort(_('invalid stage name: %s') % n)
2014 2050 showalways.update(opts['show_stage'])
2015 2051
2016 2052 treebystage = {}
2017 2053 printedtree = None
2018 2054 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2019 2055 for n, f in stages:
2020 2056 treebystage[n] = tree = f(tree)
2021 2057 if n in showalways or (n in showchanged and tree != printedtree):
2022 2058 if opts['show_stage'] or n != 'parsed':
2023 2059 ui.write(("* %s:\n") % n)
2024 2060 ui.write(revsetlang.prettyformat(tree), "\n")
2025 2061 printedtree = tree
2026 2062
2027 2063 if opts['verify_optimized']:
2028 2064 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2029 2065 brevs = revset.makematcher(treebystage['optimized'])(repo)
2030 2066 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2031 2067 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2032 2068 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2033 2069 arevs = list(arevs)
2034 2070 brevs = list(brevs)
2035 2071 if arevs == brevs:
2036 2072 return 0
2037 2073 ui.write(('--- analyzed\n'), label='diff.file_a')
2038 2074 ui.write(('+++ optimized\n'), label='diff.file_b')
2039 2075 sm = difflib.SequenceMatcher(None, arevs, brevs)
2040 2076 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2041 2077 if tag in ('delete', 'replace'):
2042 2078 for c in arevs[alo:ahi]:
2043 2079 ui.write('-%s\n' % c, label='diff.deleted')
2044 2080 if tag in ('insert', 'replace'):
2045 2081 for c in brevs[blo:bhi]:
2046 2082 ui.write('+%s\n' % c, label='diff.inserted')
2047 2083 if tag == 'equal':
2048 2084 for c in arevs[alo:ahi]:
2049 2085 ui.write(' %s\n' % c)
2050 2086 return 1
2051 2087
2052 2088 func = revset.makematcher(tree)
2053 2089 revs = func(repo)
2054 2090 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2055 2091 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2056 2092 if not opts['show_revs']:
2057 2093 return
2058 2094 for c in revs:
2059 2095 ui.write("%s\n" % c)
2060 2096
2061 2097 @command('debugsetparents', [], _('REV1 [REV2]'))
2062 2098 def debugsetparents(ui, repo, rev1, rev2=None):
2063 2099 """manually set the parents of the current working directory
2064 2100
2065 2101 This is useful for writing repository conversion tools, but should
2066 2102 be used with care. For example, neither the working directory nor the
2067 2103 dirstate is updated, so file status may be incorrect after running this
2068 2104 command.
2069 2105
2070 2106 Returns 0 on success.
2071 2107 """
2072 2108
2073 2109 r1 = scmutil.revsingle(repo, rev1).node()
2074 2110 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2075 2111
2076 2112 with repo.wlock():
2077 2113 repo.setparents(r1, r2)
2078 2114
2079 2115 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2080 2116 def debugssl(ui, repo, source=None, **opts):
2081 2117 '''test a secure connection to a server
2082 2118
2083 2119 This builds the certificate chain for the server on Windows, installing the
2084 2120 missing intermediates and trusted root via Windows Update if necessary. It
2085 2121 does nothing on other platforms.
2086 2122
2087 2123 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2088 2124 that server is used. See :hg:`help urls` for more information.
2089 2125
2090 2126 If the update succeeds, retry the original operation. Otherwise, the cause
2091 2127 of the SSL error is likely another issue.
2092 2128 '''
2093 2129 if not pycompat.iswindows:
2094 2130 raise error.Abort(_('certificate chain building is only possible on '
2095 2131 'Windows'))
2096 2132
2097 2133 if not source:
2098 2134 if not repo:
2099 2135 raise error.Abort(_("there is no Mercurial repository here, and no "
2100 2136 "server specified"))
2101 2137 source = "default"
2102 2138
2103 2139 source, branches = hg.parseurl(ui.expandpath(source))
2104 2140 url = util.url(source)
2105 2141 addr = None
2106 2142
2107 2143 if url.scheme == 'https':
2108 2144 addr = (url.host, url.port or 443)
2109 2145 elif url.scheme == 'ssh':
2110 2146 addr = (url.host, url.port or 22)
2111 2147 else:
2112 2148 raise error.Abort(_("only https and ssh connections are supported"))
2113 2149
2114 2150 from . import win32
2115 2151
2116 2152 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2117 2153 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2118 2154
2119 2155 try:
2120 2156 s.connect(addr)
2121 2157 cert = s.getpeercert(True)
2122 2158
2123 2159 ui.status(_('checking the certificate chain for %s\n') % url.host)
2124 2160
2125 2161 complete = win32.checkcertificatechain(cert, build=False)
2126 2162
2127 2163 if not complete:
2128 2164 ui.status(_('certificate chain is incomplete, updating... '))
2129 2165
2130 2166 if not win32.checkcertificatechain(cert):
2131 2167 ui.status(_('failed.\n'))
2132 2168 else:
2133 2169 ui.status(_('done.\n'))
2134 2170 else:
2135 2171 ui.status(_('full certificate chain is available\n'))
2136 2172 finally:
2137 2173 s.close()
2138 2174
2139 2175 @command('debugsub',
2140 2176 [('r', 'rev', '',
2141 2177 _('revision to check'), _('REV'))],
2142 2178 _('[-r REV] [REV]'))
2143 2179 def debugsub(ui, repo, rev=None):
2144 2180 ctx = scmutil.revsingle(repo, rev, None)
2145 2181 for k, v in sorted(ctx.substate.items()):
2146 2182 ui.write(('path %s\n') % k)
2147 2183 ui.write((' source %s\n') % v[0])
2148 2184 ui.write((' revision %s\n') % v[1])
2149 2185
2150 2186 @command('debugsuccessorssets',
2151 2187 [('', 'closest', False, _('return closest successors sets only'))],
2152 2188 _('[REV]'))
2153 2189 def debugsuccessorssets(ui, repo, *revs, **opts):
2154 2190 """show set of successors for revision
2155 2191
2156 2192 A successors set of changeset A is a consistent group of revisions that
2157 2193 succeed A. It contains non-obsolete changesets only unless the
2158 2194 --closest option is set.
2159 2195
2160 2196 In most cases a changeset A has a single successors set containing a single
2161 2197 successor (changeset A replaced by A').
2162 2198
2163 2199 A changeset that is made obsolete with no successors is called "pruned".
2164 2200 Such changesets have no successors sets at all.
2165 2201
2166 2202 A changeset that has been "split" will have a successors set containing
2167 2203 more than one successor.
2168 2204
2169 2205 A changeset that has been rewritten in multiple different ways is called
2170 2206 "divergent". Such changesets have multiple successor sets (each of which
2171 2207 may also be split, i.e. have multiple successors).
2172 2208
2173 2209 Results are displayed as follows::
2174 2210
2175 2211 <rev1>
2176 2212 <successors-1A>
2177 2213 <rev2>
2178 2214 <successors-2A>
2179 2215 <successors-2B1> <successors-2B2> <successors-2B3>
2180 2216
2181 2217 Here rev2 has two possible (i.e. divergent) successors sets. The first
2182 2218 holds one element, whereas the second holds three (i.e. the changeset has
2183 2219 been split).
2184 2220 """
2185 2221 # passed to successorssets caching computation from one call to another
2186 2222 cache = {}
2187 2223 ctx2str = str
2188 2224 node2str = short
2189 2225 if ui.debug():
2190 2226 def ctx2str(ctx):
2191 2227 return ctx.hex()
2192 2228 node2str = hex
2193 2229 for rev in scmutil.revrange(repo, revs):
2194 2230 ctx = repo[rev]
2195 2231 ui.write('%s\n'% ctx2str(ctx))
2196 2232 for succsset in obsutil.successorssets(repo, ctx.node(),
2197 2233 closest=opts['closest'],
2198 2234 cache=cache):
2199 2235 if succsset:
2200 2236 ui.write(' ')
2201 2237 ui.write(node2str(succsset[0]))
2202 2238 for node in succsset[1:]:
2203 2239 ui.write(' ')
2204 2240 ui.write(node2str(node))
2205 2241 ui.write('\n')
2206 2242
2207 2243 @command('debugtemplate',
2208 2244 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2209 2245 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2210 2246 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2211 2247 optionalrepo=True)
2212 2248 def debugtemplate(ui, repo, tmpl, **opts):
2213 2249 """parse and apply a template
2214 2250
2215 2251 If -r/--rev is given, the template is processed as a log template and
2216 2252 applied to the given changesets. Otherwise, it is processed as a generic
2217 2253 template.
2218 2254
2219 2255 Use --verbose to print the parsed tree.
2220 2256 """
2221 2257 revs = None
2222 2258 if opts[r'rev']:
2223 2259 if repo is None:
2224 2260 raise error.RepoError(_('there is no Mercurial repository here '
2225 2261 '(.hg not found)'))
2226 2262 revs = scmutil.revrange(repo, opts[r'rev'])
2227 2263
2228 2264 props = {}
2229 2265 for d in opts[r'define']:
2230 2266 try:
2231 2267 k, v = (e.strip() for e in d.split('=', 1))
2232 2268 if not k or k == 'ui':
2233 2269 raise ValueError
2234 2270 props[k] = v
2235 2271 except ValueError:
2236 2272 raise error.Abort(_('malformed keyword definition: %s') % d)
2237 2273
2238 2274 if ui.verbose:
2239 2275 aliases = ui.configitems('templatealias')
2240 2276 tree = templater.parse(tmpl)
2241 2277 ui.note(templater.prettyformat(tree), '\n')
2242 2278 newtree = templater.expandaliases(tree, aliases)
2243 2279 if newtree != tree:
2244 2280 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2245 2281
2246 2282 if revs is None:
2247 2283 t = formatter.maketemplater(ui, tmpl)
2248 2284 props['ui'] = ui
2249 2285 ui.write(t.render(props))
2250 2286 else:
2251 2287 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2252 2288 for r in revs:
2253 2289 displayer.show(repo[r], **pycompat.strkwargs(props))
2254 2290 displayer.close()
2255 2291
2256 2292 @command('debugupdatecaches', [])
2257 2293 def debugupdatecaches(ui, repo, *pats, **opts):
2258 2294 """warm all known caches in the repository"""
2259 2295 with repo.wlock(), repo.lock():
2260 2296 repo.updatecaches()
2261 2297
2262 2298 @command('debugupgraderepo', [
2263 2299 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2264 2300 ('', 'run', False, _('performs an upgrade')),
2265 2301 ])
2266 2302 def debugupgraderepo(ui, repo, run=False, optimize=None):
2267 2303 """upgrade a repository to use different features
2268 2304
2269 2305 If no arguments are specified, the repository is evaluated for upgrade
2270 2306 and a list of problems and potential optimizations is printed.
2271 2307
2272 2308 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2273 2309 can be influenced via additional arguments. More details will be provided
2274 2310 by the command output when run without ``--run``.
2275 2311
2276 2312 During the upgrade, the repository will be locked and no writes will be
2277 2313 allowed.
2278 2314
2279 2315 At the end of the upgrade, the repository may not be readable while new
2280 2316 repository data is swapped in. This window will be as long as it takes to
2281 2317 rename some directories inside the ``.hg`` directory. On most machines, this
2282 2318 should complete almost instantaneously and the chances of a consumer being
2283 2319 unable to access the repository should be low.
2284 2320 """
2285 2321 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2286 2322
2287 2323 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2288 2324 inferrepo=True)
2289 2325 def debugwalk(ui, repo, *pats, **opts):
2290 2326 """show how files match on given patterns"""
2291 2327 opts = pycompat.byteskwargs(opts)
2292 2328 m = scmutil.match(repo[None], pats, opts)
2293 2329 ui.write(('matcher: %r\n' % m))
2294 2330 items = list(repo[None].walk(m))
2295 2331 if not items:
2296 2332 return
2297 2333 f = lambda fn: fn
2298 2334 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2299 2335 f = lambda fn: util.normpath(fn)
2300 2336 fmt = 'f %%-%ds %%-%ds %%s' % (
2301 2337 max([len(abs) for abs in items]),
2302 2338 max([len(m.rel(abs)) for abs in items]))
2303 2339 for abs in items:
2304 2340 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2305 2341 ui.write("%s\n" % line.rstrip())
2306 2342
2307 2343 @command('debugwireargs',
2308 2344 [('', 'three', '', 'three'),
2309 2345 ('', 'four', '', 'four'),
2310 2346 ('', 'five', '', 'five'),
2311 2347 ] + cmdutil.remoteopts,
2312 2348 _('REPO [OPTIONS]... [ONE [TWO]]'),
2313 2349 norepo=True)
2314 2350 def debugwireargs(ui, repopath, *vals, **opts):
2315 2351 opts = pycompat.byteskwargs(opts)
2316 2352 repo = hg.peer(ui, opts, repopath)
2317 2353 for opt in cmdutil.remoteopts:
2318 2354 del opts[opt[1]]
2319 2355 args = {}
2320 2356 for k, v in opts.iteritems():
2321 2357 if v:
2322 2358 args[k] = v
2323 2359 # run twice to check that we don't mess up the stream for the next command
2324 2360 res1 = repo.debugwireargs(*vals, **args)
2325 2361 res2 = repo.debugwireargs(*vals, **args)
2326 2362 ui.write("%s\n" % res1)
2327 2363 if res1 != res2:
2328 2364 ui.warn("%s\n" % res2)
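A minimal sketch of how the sparse-read figures exercised in the test below
relate to each other, assuming readsize is the total number of bytes fetched,
largestblock the largest contiguous block among those reads, and readdensity
the ratio of chain data to bytes read (consistent with the values shown in the
test); sparsereadstats and readranges are hypothetical names used only for
illustration:

    def sparsereadstats(readranges, chainsize):
        # readranges: list of (start, end) byte offsets that a sparse read
        # would fetch for the delta chain; chainsize: compressed chain size
        sizes = [end - start for start, end in readranges]
        readsize = sum(sizes)
        largestblock = max(sizes) if sizes else 0
        readdensity = float(chainsize) / readsize if readsize else 1.0
        return readsize, largestblock, readdensity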
@@ -1,192 +1,226 b''
1 1 $ cat << EOF >> $HGRCPATH
2 2 > [format]
3 3 > usegeneraldelta=yes
4 4 > EOF
5 5
6 6 $ hg init debugrevlog
7 7 $ cd debugrevlog
8 8 $ echo a > a
9 9 $ hg ci -Am adda
10 10 adding a
11 11 $ hg debugrevlog -m
12 12 format : 1
13 13 flags : inline, generaldelta
14 14
15 15 revisions : 1
16 16 merges : 0 ( 0.00%)
17 17 normal : 1 (100.00%)
18 18 revisions : 1
19 19 full : 1 (100.00%)
20 20 deltas : 0 ( 0.00%)
21 21 revision size : 44
22 22 full : 44 (100.00%)
23 23 deltas : 0 ( 0.00%)
24 24
25 25 chunks : 1
26 26 0x75 (u) : 1 (100.00%)
27 27 chunks size : 44
28 28 0x75 (u) : 44 (100.00%)
29 29
30 30 avg chain length : 0
31 31 max chain length : 0
32 32 max chain reach : 44
33 33 compression ratio : 0
34 34
35 35 uncompressed data size (min/max/avg) : 43 / 43 / 43
36 36 full revision size (min/max/avg) : 44 / 44 / 44
37 37 delta size (min/max/avg) : 0 / 0 / 0
38 38
39 39 Test debugindex, with and without the --debug flag
40 40 $ hg debugindex a
41 41 rev offset length ..... linkrev nodeid p1 p2 (re)
42 42 0 0 3 .... 0 b789fdd96dc2 000000000000 000000000000 (re)
43 43 $ hg --debug debugindex a
44 44 rev offset length ..... linkrev nodeid p1 p2 (re)
45 45 0 0 3 .... 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re)
46 46 $ hg debugindex -f 1 a
47 47 rev flag offset length size ..... link p1 p2 nodeid (re)
48 48 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2 (re)
49 49 $ hg --debug debugindex -f 1 a
50 50 rev flag offset length size ..... link p1 p2 nodeid (re)
51 51 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re)
52 52
53 53 debugdelta chain basic output
54 54
55 55 $ hg debugdeltachain -m
56 56 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
57 57 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
58 58
59 59 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
60 60 0 1 1
61 61
62 62 $ hg debugdeltachain -m -Tjson
63 63 [
64 64 {
65 65 "chainid": 1,
66 66 "chainlen": 1,
67 67 "chainratio": 1.02325581395,
68 68 "chainsize": 44,
69 69 "compsize": 44,
70 70 "deltatype": "base",
71 71 "extradist": 0,
72 72 "extraratio": 0.0,
73 73 "lindist": 44,
74 74 "prevrev": -1,
75 75 "rev": 0,
76 76 "uncompsize": 43
77 77 }
78 78 ]
79 79
80 debugdelta chain with sparse read enabled
81
82 $ cat >> $HGRCPATH <<EOF
83 > [experimental]
84 > sparse-read = True
85 > EOF
86 $ hg debugdeltachain -m
87 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity
88 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000
89
90 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
91 0 1 1 44 44 1.0
92
93 $ hg debugdeltachain -m -Tjson
94 [
95 {
96 "chainid": 1,
97 "chainlen": 1,
98 "chainratio": 1.02325581395,
99 "chainsize": 44,
100 "compsize": 44,
101 "deltatype": "base",
102 "extradist": 0,
103 "extraratio": 0.0,
104 "largestblock": 44,
105 "lindist": 44,
106 "prevrev": -1,
107 "readdensity": 1.0,
108 "readsize": 44,
109 "rev": 0,
110 "uncompsize": 43
111 }
112 ]
113
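With a single full revision the whole 44-byte chunk is fetched in one read, so
readsize and largestblk both equal the chain size and, assuming readdensity is
chainsize divided by readsize, 44 / 44.0 == 1.0 matches the output above.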
80 114 Test max chain len
81 115 $ cat >> $HGRCPATH << EOF
82 116 > [format]
83 117 > maxchainlen=4
84 118 > EOF
85 119
86 120 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
87 121 $ hg ci -m a
88 122 $ printf "b\n" >> a
89 123 $ hg ci -m a
90 124 $ printf "c\n" >> a
91 125 $ hg ci -m a
92 126 $ printf "d\n" >> a
93 127 $ hg ci -m a
94 128 $ printf "e\n" >> a
95 129 $ hg ci -m a
96 130 $ printf "f\n" >> a
97 131 $ hg ci -m a
98 132 $ printf 'g\n' >> a
99 133 $ hg ci -m a
100 134 $ printf 'h\n' >> a
101 135 $ hg ci -m a
102 136 $ hg debugrevlog -d a
103 137 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
104 138 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
105 139 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
106 140 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
107 141 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
108 142 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
109 143 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
110 144 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
111 145 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
112 146 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
113 147
114 148 Test WdirUnsupported exception
115 149
116 150 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
117 151 abort: working directory revision cannot be specified
118 152 [255]
119 153
120 154 Test cache warming command
121 155
122 156 $ rm -rf .hg/cache/
123 157 $ hg debugupdatecaches --debug
124 158 updating the branch cache
125 159 $ ls -r .hg/cache/*
126 160 .hg/cache/rbc-revs-v1
127 161 .hg/cache/rbc-names-v1
128 162 .hg/cache/branch2-served
129 163
130 164 $ cd ..
131 165
132 166 Test internal debugstacktrace command
133 167
134 168 $ cat > debugstacktrace.py << EOF
135 169 > from __future__ import absolute_import
136 170 > import sys
137 171 > from mercurial import util
138 172 > def f():
139 173 > util.debugstacktrace(f=sys.stdout)
140 174 > g()
141 175 > def g():
142 176 > util.dst('hello from g\\n', skip=1)
143 177 > h()
144 178 > def h():
145 179 > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
146 180 > f()
147 181 > EOF
148 182 $ $PYTHON debugstacktrace.py
149 183 stacktrace at:
150 184 debugstacktrace.py:12 in * (glob)
151 185 debugstacktrace.py:5 in f
152 186 hello from g at:
153 187 debugstacktrace.py:12 in * (glob)
154 188 debugstacktrace.py:6 in f
155 189 hi ...
156 190 from h hidden in g at:
157 191 debugstacktrace.py:6 in f
158 192 debugstacktrace.py:9 in g
159 193
160 194 Test debugcapabilities command:
161 195
162 196 $ hg debugcapabilities ./debugrevlog/
163 197 Main capabilities:
164 198 branchmap
165 199 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps
166 200 getbundle
167 201 known
168 202 lookup
169 203 pushkey
170 204 unbundle
171 205 Bundle2 capabilities:
172 206 HG20
173 207 changegroup
174 208 01
175 209 02
176 210 digests
177 211 md5
178 212 sha1
179 213 sha512
180 214 error
181 215 abort
182 216 unsupportedcontent
183 217 pushraced
184 218 pushkey
185 219 hgtagsfnodes
186 220 listkeys
187 221 phases
188 222 heads
189 223 pushkey
190 224 remote-changegroup
191 225 http
192 226 https