debugrevlog: fix for non-manifest object...
Author: Boris Feld
Changeset: r39184:14641833 (branch: default)
@@ -1,3285 +1,3288 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 filesetlang,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
92 92 release = lockmod.release
93 93
94 94 command = registrar.command()
95 95
96 96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 97 def debugancestor(ui, repo, *args):
98 98 """find the ancestor revision of two revisions in a given index"""
99 99 if len(args) == 3:
100 100 index, rev1, rev2 = args
101 101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 102 lookup = r.lookup
103 103 elif len(args) == 2:
104 104 if not repo:
105 105 raise error.Abort(_('there is no Mercurial repository here '
106 106 '(.hg not found)'))
107 107 rev1, rev2 = args
108 108 r = repo.changelog
109 109 lookup = repo.lookup
110 110 else:
111 111 raise error.Abort(_('either two or three arguments required'))
112 112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114 114
115 115 @command('debugapplystreamclonebundle', [], 'FILE')
116 116 def debugapplystreamclonebundle(ui, repo, fname):
117 117 """apply a stream clone bundle file"""
118 118 f = hg.openpath(ui, fname)
119 119 gen = exchange.readbundle(ui, f, fname)
120 120 gen.apply(repo)
121 121
122 122 @command('debugbuilddag',
123 123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 125 ('n', 'new-file', None, _('add new file at each rev'))],
126 126 _('[OPTION]... [TEXT]'))
127 127 def debugbuilddag(ui, repo, text=None,
128 128 mergeable_file=False,
129 129 overwritten_file=False,
130 130 new_file=False):
131 131 """builds a repo with a given DAG from scratch in the current empty repo
132 132
133 133 The description of the DAG is read from stdin if not given on the
134 134 command line.
135 135
136 136 Elements:
137 137
138 138 - "+n" is a linear run of n nodes based on the current default parent
139 139 - "." is a single node based on the current default parent
140 140 - "$" resets the default parent to null (implied at the start);
141 141 otherwise the default parent is always the last node created
142 142 - "<p" sets the default parent to the backref p
143 143 - "*p" is a fork at parent p, which is a backref
144 144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 145 - "/p2" is a merge of the preceding node and p2
146 146 - ":tag" defines a local tag for the preceding node
147 147 - "@branch" sets the named branch for subsequent nodes
148 148 - "#...\\n" is a comment up to the end of the line
149 149
150 150 Whitespace between the above elements is ignored.
151 151
152 152 A backref is either
153 153
154 154 - a number n, which references the node curr-n, where curr is the current
155 155 node, or
156 156 - the name of a local tag you placed earlier using ":tag", or
157 157 - empty to denote the default parent.
158 158
159 159 All string-valued elements are either strictly alphanumeric, or must
160 160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 161 """
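# Illustrative usage (hypothetical invocation, assuming an empty repository):
#   hg debugbuilddag '+2 :base $ +1 /base'
# creates r0 and r1 as a linear run, tags r1 locally as "base", resets the
# default parent to null, adds a new root r2, and finally creates r3 as a
# merge of r2 (the preceding node) with the node tagged "base" (r1).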
162 162
163 163 if text is None:
164 164 ui.status(_("reading DAG from stdin\n"))
165 165 text = ui.fin.read()
166 166
167 167 cl = repo.changelog
168 168 if len(cl) > 0:
169 169 raise error.Abort(_('repository is not empty'))
170 170
171 171 # determine number of revs in DAG
172 172 total = 0
173 173 for type, data in dagparser.parsedag(text):
174 174 if type == 'n':
175 175 total += 1
176 176
177 177 if mergeable_file:
178 178 linesperrev = 2
179 179 # make a file with k lines per rev
180 180 initialmergedlines = ['%d' % i
181 181 for i in pycompat.xrange(0, total * linesperrev)]
182 182 initialmergedlines.append("")
183 183
184 184 tags = []
185 185 progress = ui.makeprogress(_('building'), unit=_('revisions'),
186 186 total=total)
187 187 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
188 188 at = -1
189 189 atbranch = 'default'
190 190 nodeids = []
191 191 id = 0
192 192 progress.update(id)
193 193 for type, data in dagparser.parsedag(text):
194 194 if type == 'n':
195 195 ui.note(('node %s\n' % pycompat.bytestr(data)))
196 196 id, ps = data
197 197
198 198 files = []
199 199 filecontent = {}
200 200
201 201 p2 = None
202 202 if mergeable_file:
203 203 fn = "mf"
204 204 p1 = repo[ps[0]]
205 205 if len(ps) > 1:
206 206 p2 = repo[ps[1]]
207 207 pa = p1.ancestor(p2)
208 208 base, local, other = [x[fn].data() for x in (pa, p1,
209 209 p2)]
210 210 m3 = simplemerge.Merge3Text(base, local, other)
211 211 ml = [l.strip() for l in m3.merge_lines()]
212 212 ml.append("")
213 213 elif at > 0:
214 214 ml = p1[fn].data().split("\n")
215 215 else:
216 216 ml = initialmergedlines
217 217 ml[id * linesperrev] += " r%i" % id
218 218 mergedtext = "\n".join(ml)
219 219 files.append(fn)
220 220 filecontent[fn] = mergedtext
221 221
222 222 if overwritten_file:
223 223 fn = "of"
224 224 files.append(fn)
225 225 filecontent[fn] = "r%i\n" % id
226 226
227 227 if new_file:
228 228 fn = "nf%i" % id
229 229 files.append(fn)
230 230 filecontent[fn] = "r%i\n" % id
231 231 if len(ps) > 1:
232 232 if not p2:
233 233 p2 = repo[ps[1]]
234 234 for fn in p2:
235 235 if fn.startswith("nf"):
236 236 files.append(fn)
237 237 filecontent[fn] = p2[fn].data()
238 238
239 239 def fctxfn(repo, cx, path):
240 240 if path in filecontent:
241 241 return context.memfilectx(repo, cx, path,
242 242 filecontent[path])
243 243 return None
244 244
245 245 if len(ps) == 0 or ps[0] < 0:
246 246 pars = [None, None]
247 247 elif len(ps) == 1:
248 248 pars = [nodeids[ps[0]], None]
249 249 else:
250 250 pars = [nodeids[p] for p in ps]
251 251 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
252 252 date=(id, 0),
253 253 user="debugbuilddag",
254 254 extra={'branch': atbranch})
255 255 nodeid = repo.commitctx(cx)
256 256 nodeids.append(nodeid)
257 257 at = id
258 258 elif type == 'l':
259 259 id, name = data
260 260 ui.note(('tag %s\n' % name))
261 261 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
262 262 elif type == 'a':
263 263 ui.note(('branch %s\n' % data))
264 264 atbranch = data
265 265 progress.update(id)
266 266
267 267 if tags:
268 268 repo.vfs.write("localtags", "".join(tags))
269 269
270 270 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
271 271 indent_string = ' ' * indent
272 272 if all:
273 273 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
274 274 % indent_string)
275 275
276 276 def showchunks(named):
277 277 ui.write("\n%s%s\n" % (indent_string, named))
278 278 for deltadata in gen.deltaiter():
279 279 node, p1, p2, cs, deltabase, delta, flags = deltadata
280 280 ui.write("%s%s %s %s %s %s %d\n" %
281 281 (indent_string, hex(node), hex(p1), hex(p2),
282 282 hex(cs), hex(deltabase), len(delta)))
283 283
284 284 chunkdata = gen.changelogheader()
285 285 showchunks("changelog")
286 286 chunkdata = gen.manifestheader()
287 287 showchunks("manifest")
288 288 for chunkdata in iter(gen.filelogheader, {}):
289 289 fname = chunkdata['filename']
290 290 showchunks(fname)
291 291 else:
292 292 if isinstance(gen, bundle2.unbundle20):
293 293 raise error.Abort(_('use debugbundle2 for this file'))
294 294 chunkdata = gen.changelogheader()
295 295 for deltadata in gen.deltaiter():
296 296 node, p1, p2, cs, deltabase, delta, flags = deltadata
297 297 ui.write("%s%s\n" % (indent_string, hex(node)))
298 298
299 299 def _debugobsmarkers(ui, part, indent=0, **opts):
300 300 """display version and markers contained in 'data'"""
301 301 opts = pycompat.byteskwargs(opts)
302 302 data = part.read()
303 303 indent_string = ' ' * indent
304 304 try:
305 305 version, markers = obsolete._readmarkers(data)
306 306 except error.UnknownVersion as exc:
307 307 msg = "%sunsupported version: %s (%d bytes)\n"
308 308 msg %= indent_string, exc.version, len(data)
309 309 ui.write(msg)
310 310 else:
311 311 msg = "%sversion: %d (%d bytes)\n"
312 312 msg %= indent_string, version, len(data)
313 313 ui.write(msg)
314 314 fm = ui.formatter('debugobsolete', opts)
315 315 for rawmarker in sorted(markers):
316 316 m = obsutil.marker(None, rawmarker)
317 317 fm.startitem()
318 318 fm.plain(indent_string)
319 319 cmdutil.showmarker(fm, m)
320 320 fm.end()
321 321
322 322 def _debugphaseheads(ui, data, indent=0):
323 323 """display the phase heads contained in 'data'"""
324 324 indent_string = ' ' * indent
325 325 headsbyphase = phases.binarydecode(data)
326 326 for phase in phases.allphases:
327 327 for head in headsbyphase[phase]:
328 328 ui.write(indent_string)
329 329 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
330 330
331 331 def _quasirepr(thing):
332 332 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
333 333 return '{%s}' % (
334 334 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
335 335 return pycompat.bytestr(repr(thing))
336 336
337 337 def _debugbundle2(ui, gen, all=None, **opts):
338 338 """lists the contents of a bundle2"""
339 339 if not isinstance(gen, bundle2.unbundle20):
340 340 raise error.Abort(_('not a bundle2 file'))
341 341 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
342 342 parttypes = opts.get(r'part_type', [])
343 343 for part in gen.iterparts():
344 344 if parttypes and part.type not in parttypes:
345 345 continue
346 346 msg = '%s -- %s (mandatory: %r)\n'
347 347 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
348 348 if part.type == 'changegroup':
349 349 version = part.params.get('version', '01')
350 350 cg = changegroup.getunbundler(version, part, 'UN')
351 351 if not ui.quiet:
352 352 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
353 353 if part.type == 'obsmarkers':
354 354 if not ui.quiet:
355 355 _debugobsmarkers(ui, part, indent=4, **opts)
356 356 if part.type == 'phase-heads':
357 357 if not ui.quiet:
358 358 _debugphaseheads(ui, part, indent=4)
359 359
360 360 @command('debugbundle',
361 361 [('a', 'all', None, _('show all details')),
362 362 ('', 'part-type', [], _('show only the named part type')),
363 363 ('', 'spec', None, _('print the bundlespec of the bundle'))],
364 364 _('FILE'),
365 365 norepo=True)
366 366 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
367 367 """lists the contents of a bundle"""
368 368 with hg.openpath(ui, bundlepath) as f:
369 369 if spec:
370 370 spec = exchange.getbundlespec(ui, f)
371 371 ui.write('%s\n' % spec)
372 372 return
373 373
374 374 gen = exchange.readbundle(ui, f, bundlepath)
375 375 if isinstance(gen, bundle2.unbundle20):
376 376 return _debugbundle2(ui, gen, all=all, **opts)
377 377 _debugchangegroup(ui, gen, all=all, **opts)
378 378
379 379 @command('debugcapabilities',
380 380 [], _('PATH'),
381 381 norepo=True)
382 382 def debugcapabilities(ui, path, **opts):
383 383 """lists the capabilities of a remote peer"""
384 384 opts = pycompat.byteskwargs(opts)
385 385 peer = hg.peer(ui, opts, path)
386 386 caps = peer.capabilities()
387 387 ui.write(('Main capabilities:\n'))
388 388 for c in sorted(caps):
389 389 ui.write((' %s\n') % c)
390 390 b2caps = bundle2.bundle2caps(peer)
391 391 if b2caps:
392 392 ui.write(('Bundle2 capabilities:\n'))
393 393 for key, values in sorted(b2caps.iteritems()):
394 394 ui.write((' %s\n') % key)
395 395 for v in values:
396 396 ui.write((' %s\n') % v)
397 397
398 398 @command('debugcheckstate', [], '')
399 399 def debugcheckstate(ui, repo):
400 400 """validate the correctness of the current dirstate"""
401 401 parent1, parent2 = repo.dirstate.parents()
402 402 m1 = repo[parent1].manifest()
403 403 m2 = repo[parent2].manifest()
404 404 errors = 0
405 405 for f in repo.dirstate:
406 406 state = repo.dirstate[f]
407 407 if state in "nr" and f not in m1:
408 408 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
409 409 errors += 1
410 410 if state in "a" and f in m1:
411 411 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
412 412 errors += 1
413 413 if state in "m" and f not in m1 and f not in m2:
414 414 ui.warn(_("%s in state %s, but not in either manifest\n") %
415 415 (f, state))
416 416 errors += 1
417 417 for f in m1:
418 418 state = repo.dirstate[f]
419 419 if state not in "nrm":
420 420 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
421 421 errors += 1
422 422 if errors:
423 423 error = _(".hg/dirstate inconsistent with current parent's manifest")
424 424 raise error.Abort(error)
425 425
426 426 @command('debugcolor',
427 427 [('', 'style', None, _('show all configured styles'))],
428 428 'hg debugcolor')
429 429 def debugcolor(ui, repo, **opts):
430 430 """show available color, effects or style"""
431 431 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
432 432 if opts.get(r'style'):
433 433 return _debugdisplaystyle(ui)
434 434 else:
435 435 return _debugdisplaycolor(ui)
436 436
437 437 def _debugdisplaycolor(ui):
438 438 ui = ui.copy()
439 439 ui._styles.clear()
440 440 for effect in color._activeeffects(ui).keys():
441 441 ui._styles[effect] = effect
442 442 if ui._terminfoparams:
443 443 for k, v in ui.configitems('color'):
444 444 if k.startswith('color.'):
445 445 ui._styles[k] = k[6:]
446 446 elif k.startswith('terminfo.'):
447 447 ui._styles[k] = k[9:]
448 448 ui.write(_('available colors:\n'))
449 449 # sort labels with a '_' after the others to group the '_background' entries.
450 450 items = sorted(ui._styles.items(),
451 451 key=lambda i: ('_' in i[0], i[0], i[1]))
452 452 for colorname, label in items:
453 453 ui.write(('%s\n') % colorname, label=label)
454 454
455 455 def _debugdisplaystyle(ui):
456 456 ui.write(_('available style:\n'))
457 457 if not ui._styles:
458 458 return
459 459 width = max(len(s) for s in ui._styles)
460 460 for label, effects in sorted(ui._styles.items()):
461 461 ui.write('%s' % label, label=label)
462 462 if effects:
463 463 # 50
464 464 ui.write(': ')
465 465 ui.write(' ' * (max(0, width - len(label))))
466 466 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
467 467 ui.write('\n')
468 468
469 469 @command('debugcreatestreamclonebundle', [], 'FILE')
470 470 def debugcreatestreamclonebundle(ui, repo, fname):
471 471 """create a stream clone bundle file
472 472
473 473 Stream bundles are special bundles that are essentially archives of
474 474 revlog files. They are commonly used for cloning very quickly.
475 475 """
476 476 # TODO we may want to turn this into an abort when this functionality
477 477 # is moved into `hg bundle`.
478 478 if phases.hassecret(repo):
479 479 ui.warn(_('(warning: stream clone bundle will contain secret '
480 480 'revisions)\n'))
481 481
482 482 requirements, gen = streamclone.generatebundlev1(repo)
483 483 changegroup.writechunks(ui, gen, fname)
484 484
485 485 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486 486
487 487 @command('debugdag',
488 488 [('t', 'tags', None, _('use tags as labels')),
489 489 ('b', 'branches', None, _('annotate with branch names')),
490 490 ('', 'dots', None, _('use dots for runs')),
491 491 ('s', 'spaces', None, _('separate elements by spaces'))],
492 492 _('[OPTION]... [FILE [REV]...]'),
493 493 optionalrepo=True)
494 494 def debugdag(ui, repo, file_=None, *revs, **opts):
495 495 """format the changelog or an index DAG as a concise textual description
496 496
497 497 If you pass a revlog index, the revlog's DAG is emitted. If you list
498 498 revision numbers, they get labeled in the output as rN.
499 499
500 500 Otherwise, the changelog DAG of the current repo is emitted.
501 501 """
502 502 spaces = opts.get(r'spaces')
503 503 dots = opts.get(r'dots')
504 504 if file_:
505 505 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
506 506 file_)
507 507 revs = set((int(r) for r in revs))
508 508 def events():
509 509 for r in rlog:
510 510 yield 'n', (r, list(p for p in rlog.parentrevs(r)
511 511 if p != -1))
512 512 if r in revs:
513 513 yield 'l', (r, "r%i" % r)
514 514 elif repo:
515 515 cl = repo.changelog
516 516 tags = opts.get(r'tags')
517 517 branches = opts.get(r'branches')
518 518 if tags:
519 519 labels = {}
520 520 for l, n in repo.tags().items():
521 521 labels.setdefault(cl.rev(n), []).append(l)
522 522 def events():
523 523 b = "default"
524 524 for r in cl:
525 525 if branches:
526 526 newb = cl.read(cl.node(r))[5]['branch']
527 527 if newb != b:
528 528 yield 'a', newb
529 529 b = newb
530 530 yield 'n', (r, list(p for p in cl.parentrevs(r)
531 531 if p != -1))
532 532 if tags:
533 533 ls = labels.get(r)
534 534 if ls:
535 535 for l in ls:
536 536 yield 'l', (r, l)
537 537 else:
538 538 raise error.Abort(_('need repo for changelog dag'))
539 539
540 540 for line in dagparser.dagtextlines(events(),
541 541 addspaces=spaces,
542 542 wraplabels=True,
543 543 wrapannotations=True,
544 544 wrapnonlinear=dots,
545 545 usedots=dots,
546 546 maxlinewidth=70):
547 547 ui.write(line)
548 548 ui.write("\n")
549 549
550 550 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
551 551 def debugdata(ui, repo, file_, rev=None, **opts):
552 552 """dump the contents of a data file revision"""
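# Illustrative usage (hypothetical invocation):
#   hg debugdata -c 0
# dumps the raw stored content of changelog revision 0; -m does the same for
# the manifest, and a FILE REV pair addresses another revlog.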
553 553 opts = pycompat.byteskwargs(opts)
554 554 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
555 555 if rev is not None:
556 556 raise error.CommandError('debugdata', _('invalid arguments'))
557 557 file_, rev = None, file_
558 558 elif rev is None:
559 559 raise error.CommandError('debugdata', _('invalid arguments'))
560 560 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
561 561 try:
562 562 ui.write(r.revision(r.lookup(rev), raw=True))
563 563 except KeyError:
564 564 raise error.Abort(_('invalid revision identifier %s') % rev)
565 565
566 566 @command('debugdate',
567 567 [('e', 'extended', None, _('try extended date formats'))],
568 568 _('[-e] DATE [RANGE]'),
569 569 norepo=True, optionalrepo=True)
570 570 def debugdate(ui, date, range=None, **opts):
571 571 """parse and display a date"""
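# Illustrative usage (hypothetical invocation):
#   hg debugdate '2006-02-01 13:00:30'
# prints the parsed date as Mercurial's internal (unixtime, offset) pair and
# in the standard date format; if a RANGE is also given, a "match:" line
# reports whether the date falls inside it.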
572 572 if opts[r"extended"]:
573 573 d = dateutil.parsedate(date, util.extendeddateformats)
574 574 else:
575 575 d = dateutil.parsedate(date)
576 576 ui.write(("internal: %d %d\n") % d)
577 577 ui.write(("standard: %s\n") % dateutil.datestr(d))
578 578 if range:
579 579 m = dateutil.matchdate(range)
580 580 ui.write(("match: %s\n") % m(d[0]))
581 581
582 582 @command('debugdeltachain',
583 583 cmdutil.debugrevlogopts + cmdutil.formatteropts,
584 584 _('-c|-m|FILE'),
585 585 optionalrepo=True)
586 586 def debugdeltachain(ui, repo, file_=None, **opts):
587 587 """dump information about delta chains in a revlog
588 588
589 589 Output can be templatized. Available template keywords are:
590 590
591 591 :``rev``: revision number
592 592 :``chainid``: delta chain identifier (numbered by unique base)
593 593 :``chainlen``: delta chain length to this revision
594 594 :``prevrev``: previous revision in delta chain
595 595 :``deltatype``: role of delta / how it was computed
596 596 :``compsize``: compressed size of revision
597 597 :``uncompsize``: uncompressed size of revision
598 598 :``chainsize``: total size of compressed revisions in chain
599 599 :``chainratio``: total chain size divided by uncompressed revision size
600 600 (new delta chains typically start at ratio 2.00)
601 601 :``lindist``: linear distance from base revision in delta chain to end
602 602 of this revision
603 603 :``extradist``: total size of revisions not part of this delta chain from
604 604 base of delta chain to end of this revision; a measurement
605 605 of how much extra data we need to read/seek across to read
606 606 the delta chain for this revision
607 607 :``extraratio``: extradist divided by chainsize; another representation of
608 608 how much unrelated data is needed to load this delta chain
609 609
610 610 If the repository is configured to use the sparse read, additional keywords
611 611 are available:
612 612
613 613 :``readsize``: total size of data read from the disk for a revision
614 614 (sum of the sizes of all the blocks)
615 615 :``largestblock``: size of the largest block of data read from the disk
616 616 :``readdensity``: density of useful bytes in the data read from the disk
617 617 :``srchunks``: in how many data hunks the whole revision would be read
618 618
619 619 The sparse read can be enabled with experimental.sparse-read = True
620 620 """
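# Illustrative usage (hypothetical invocation) combining the template keywords
# documented above with the generic formatter options:
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
# prints one line per manifest revision with its delta chain id, chain length
# and chain size ratio.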
621 621 opts = pycompat.byteskwargs(opts)
622 622 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
623 623 index = r.index
624 624 start = r.start
625 625 length = r.length
626 626 generaldelta = r.version & revlog.FLAG_GENERALDELTA
627 627 withsparseread = getattr(r, '_withsparseread', False)
628 628
629 629 def revinfo(rev):
630 630 e = index[rev]
631 631 compsize = e[1]
632 632 uncompsize = e[2]
633 633 chainsize = 0
634 634
635 635 if generaldelta:
636 636 if e[3] == e[5]:
637 637 deltatype = 'p1'
638 638 elif e[3] == e[6]:
639 639 deltatype = 'p2'
640 640 elif e[3] == rev - 1:
641 641 deltatype = 'prev'
642 642 elif e[3] == rev:
643 643 deltatype = 'base'
644 644 else:
645 645 deltatype = 'other'
646 646 else:
647 647 if e[3] == rev:
648 648 deltatype = 'base'
649 649 else:
650 650 deltatype = 'prev'
651 651
652 652 chain = r._deltachain(rev)[0]
653 653 for iterrev in chain:
654 654 e = index[iterrev]
655 655 chainsize += e[1]
656 656
657 657 return compsize, uncompsize, deltatype, chain, chainsize
658 658
659 659 fm = ui.formatter('debugdeltachain', opts)
660 660
661 661 fm.plain(' rev chain# chainlen prev delta '
662 662 'size rawsize chainsize ratio lindist extradist '
663 663 'extraratio')
664 664 if withsparseread:
665 665 fm.plain(' readsize largestblk rddensity srchunks')
666 666 fm.plain('\n')
667 667
668 668 chainbases = {}
669 669 for rev in r:
670 670 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
671 671 chainbase = chain[0]
672 672 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
673 673 basestart = start(chainbase)
674 674 revstart = start(rev)
675 675 lineardist = revstart + comp - basestart
676 676 extradist = lineardist - chainsize
677 677 try:
678 678 prevrev = chain[-2]
679 679 except IndexError:
680 680 prevrev = -1
681 681
682 682 if uncomp != 0:
683 683 chainratio = float(chainsize) / float(uncomp)
684 684 else:
685 685 chainratio = chainsize
686 686
687 687 if chainsize != 0:
688 688 extraratio = float(extradist) / float(chainsize)
689 689 else:
690 690 extraratio = extradist
691 691
692 692 fm.startitem()
693 693 fm.write('rev chainid chainlen prevrev deltatype compsize '
694 694 'uncompsize chainsize chainratio lindist extradist '
695 695 'extraratio',
696 696 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
697 697 rev, chainid, len(chain), prevrev, deltatype, comp,
698 698 uncomp, chainsize, chainratio, lineardist, extradist,
699 699 extraratio,
700 700 rev=rev, chainid=chainid, chainlen=len(chain),
701 701 prevrev=prevrev, deltatype=deltatype, compsize=comp,
702 702 uncompsize=uncomp, chainsize=chainsize,
703 703 chainratio=chainratio, lindist=lineardist,
704 704 extradist=extradist, extraratio=extraratio)
705 705 if withsparseread:
706 706 readsize = 0
707 707 largestblock = 0
708 708 srchunks = 0
709 709
710 710 for revschunk in revlog._slicechunk(r, chain):
711 711 srchunks += 1
712 712 blkend = start(revschunk[-1]) + length(revschunk[-1])
713 713 blksize = blkend - start(revschunk[0])
714 714
715 715 readsize += blksize
716 716 if largestblock < blksize:
717 717 largestblock = blksize
718 718
719 719 if readsize:
720 720 readdensity = float(chainsize) / float(readsize)
721 721 else:
722 722 readdensity = 1
723 723
724 724 fm.write('readsize largestblock readdensity srchunks',
725 725 ' %10d %10d %9.5f %8d',
726 726 readsize, largestblock, readdensity, srchunks,
727 727 readsize=readsize, largestblock=largestblock,
728 728 readdensity=readdensity, srchunks=srchunks)
729 729
730 730 fm.plain('\n')
731 731
732 732 fm.end()
733 733
734 734 @command('debugdirstate|debugstate',
735 735 [('', 'nodates', None, _('do not display the saved mtime')),
736 736 ('', 'datesort', None, _('sort by saved mtime'))],
737 737 _('[OPTION]...'))
738 738 def debugstate(ui, repo, **opts):
739 739 """show the contents of the current dirstate"""
740 740
741 741 nodates = opts.get(r'nodates')
742 742 datesort = opts.get(r'datesort')
743 743
744 744 timestr = ""
745 745 if datesort:
746 746 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
747 747 else:
748 748 keyfunc = None # sort by filename
749 749 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
750 750 if ent[3] == -1:
751 751 timestr = 'unset '
752 752 elif nodates:
753 753 timestr = 'set '
754 754 else:
755 755 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
756 756 time.localtime(ent[3]))
757 757 timestr = encoding.strtolocal(timestr)
758 758 if ent[1] & 0o20000:
759 759 mode = 'lnk'
760 760 else:
761 761 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
762 762 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
763 763 for f in repo.dirstate.copies():
764 764 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765 765
766 766 @command('debugdiscovery',
767 767 [('', 'old', None, _('use old-style discovery')),
768 768 ('', 'nonheads', None,
769 769 _('use old-style discovery with non-heads included')),
770 770 ('', 'rev', [], 'restrict discovery to this set of revs'),
771 771 ] + cmdutil.remoteopts,
772 772 _('[--rev REV] [OTHER]'))
773 773 def debugdiscovery(ui, repo, remoteurl="default", **opts):
774 774 """runs the changeset discovery protocol in isolation"""
775 775 opts = pycompat.byteskwargs(opts)
776 776 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
777 777 remote = hg.peer(repo, opts, remoteurl)
778 778 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
779 779
780 780 # make sure tests are repeatable
781 781 random.seed(12323)
782 782
783 783 def doit(pushedrevs, remoteheads, remote=remote):
784 784 if opts.get('old'):
785 785 if not util.safehasattr(remote, 'branches'):
786 786 # enable in-client legacy support
787 787 remote = localrepo.locallegacypeer(remote.local())
788 788 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
789 789 force=True)
790 790 common = set(common)
791 791 if not opts.get('nonheads'):
792 792 ui.write(("unpruned common: %s\n") %
793 793 " ".join(sorted(short(n) for n in common)))
794 794 dag = dagutil.revlogdag(repo.changelog)
795 795 all = dag.ancestorset(dag.internalizeall(common))
796 796 common = dag.externalizeall(dag.headsetofconnecteds(all))
797 797 else:
798 798 nodes = None
799 799 if pushedrevs:
800 800 revs = scmutil.revrange(repo, pushedrevs)
801 801 nodes = [repo[r].node() for r in revs]
802 802 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
803 803 ancestorsof=nodes)
804 804 common = set(common)
805 805 rheads = set(hds)
806 806 lheads = set(repo.heads())
807 807 ui.write(("common heads: %s\n") %
808 808 " ".join(sorted(short(n) for n in common)))
809 809 if lheads <= common:
810 810 ui.write(("local is subset\n"))
811 811 elif rheads <= common:
812 812 ui.write(("remote is subset\n"))
813 813
814 814 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
815 815 localrevs = opts['rev']
816 816 doit(localrevs, remoterevs)
817 817
818 818 _chunksize = 4 << 10
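# _chunksize above is 4 << 10 == 4096 bytes (4 KiB), the read/write buffer
# size used by debugdownload below.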
819 819
820 820 @command('debugdownload',
821 821 [
822 822 ('o', 'output', '', _('path')),
823 823 ],
824 824 optionalrepo=True)
825 825 def debugdownload(ui, repo, url, output=None, **opts):
826 826 """download a resource using Mercurial logic and config
827 827 """
828 828 fh = urlmod.open(ui, url, output)
829 829
830 830 dest = ui
831 831 if output:
832 832 dest = open(output, "wb", _chunksize)
833 833 try:
834 834 data = fh.read(_chunksize)
835 835 while data:
836 836 dest.write(data)
837 837 data = fh.read(_chunksize)
838 838 finally:
839 839 if output:
840 840 dest.close()
841 841
842 842 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
843 843 def debugextensions(ui, repo, **opts):
844 844 '''show information about active extensions'''
845 845 opts = pycompat.byteskwargs(opts)
846 846 exts = extensions.extensions(ui)
847 847 hgver = util.version()
848 848 fm = ui.formatter('debugextensions', opts)
849 849 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
850 850 isinternal = extensions.ismoduleinternal(extmod)
851 851 extsource = pycompat.fsencode(extmod.__file__)
852 852 if isinternal:
853 853 exttestedwith = [] # never expose magic string to users
854 854 else:
855 855 exttestedwith = getattr(extmod, 'testedwith', '').split()
856 856 extbuglink = getattr(extmod, 'buglink', None)
857 857
858 858 fm.startitem()
859 859
860 860 if ui.quiet or ui.verbose:
861 861 fm.write('name', '%s\n', extname)
862 862 else:
863 863 fm.write('name', '%s', extname)
864 864 if isinternal or hgver in exttestedwith:
865 865 fm.plain('\n')
866 866 elif not exttestedwith:
867 867 fm.plain(_(' (untested!)\n'))
868 868 else:
869 869 lasttestedversion = exttestedwith[-1]
870 870 fm.plain(' (%s!)\n' % lasttestedversion)
871 871
872 872 fm.condwrite(ui.verbose and extsource, 'source',
873 873 _(' location: %s\n'), extsource or "")
874 874
875 875 if ui.verbose:
876 876 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
877 877 fm.data(bundled=isinternal)
878 878
879 879 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
880 880 _(' tested with: %s\n'),
881 881 fm.formatlist(exttestedwith, name='ver'))
882 882
883 883 fm.condwrite(ui.verbose and extbuglink, 'buglink',
884 884 _(' bug reporting: %s\n'), extbuglink or "")
885 885
886 886 fm.end()
887 887
888 888 @command('debugfileset',
889 889 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
890 890 ('', 'all-files', False,
891 891 _('test files from all revisions and working directory')),
892 892 ('s', 'show-matcher', None,
893 893 _('print internal representation of matcher')),
894 894 ('p', 'show-stage', [],
895 895 _('print parsed tree at the given stage'), _('NAME'))],
896 896 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
897 897 def debugfileset(ui, repo, expr, **opts):
898 898 '''parse and apply a fileset specification'''
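# Illustrative usage (hypothetical invocation):
#   hg debugfileset --show-stage all 'added() or modified()'
# prints the parsed, analyzed and optimized trees for the fileset expression
# and then the files from the target revision that match it.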
899 899 from . import fileset
900 900 fileset.symbols # force import of fileset so we have predicates to optimize
901 901 opts = pycompat.byteskwargs(opts)
902 902 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
903 903
904 904 stages = [
905 905 ('parsed', pycompat.identity),
906 906 ('analyzed', filesetlang.analyze),
907 907 ('optimized', filesetlang.optimize),
908 908 ]
909 909 stagenames = set(n for n, f in stages)
910 910
911 911 showalways = set()
912 912 if ui.verbose and not opts['show_stage']:
913 913 # show parsed tree by --verbose (deprecated)
914 914 showalways.add('parsed')
915 915 if opts['show_stage'] == ['all']:
916 916 showalways.update(stagenames)
917 917 else:
918 918 for n in opts['show_stage']:
919 919 if n not in stagenames:
920 920 raise error.Abort(_('invalid stage name: %s') % n)
921 921 showalways.update(opts['show_stage'])
922 922
923 923 tree = filesetlang.parse(expr)
924 924 for n, f in stages:
925 925 tree = f(tree)
926 926 if n in showalways:
927 927 if opts['show_stage'] or n != 'parsed':
928 928 ui.write(("* %s:\n") % n)
929 929 ui.write(filesetlang.prettyformat(tree), "\n")
930 930
931 931 files = set()
932 932 if opts['all_files']:
933 933 for r in repo:
934 934 c = repo[r]
935 935 files.update(c.files())
936 936 files.update(c.substate)
937 937 if opts['all_files'] or ctx.rev() is None:
938 938 wctx = repo[None]
939 939 files.update(repo.dirstate.walk(scmutil.matchall(repo),
940 940 subrepos=list(wctx.substate),
941 941 unknown=True, ignored=True))
942 942 files.update(wctx.substate)
943 943 else:
944 944 files.update(ctx.files())
945 945 files.update(ctx.substate)
946 946
947 947 m = ctx.matchfileset(expr)
948 948 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
949 949 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
950 950 for f in sorted(files):
951 951 if not m(f):
952 952 continue
953 953 ui.write("%s\n" % f)
954 954
955 955 @command('debugformat',
956 956 [] + cmdutil.formatteropts)
957 957 def debugformat(ui, repo, **opts):
958 958 """display format information about the current repository
959 959
960 960 Use --verbose to get extra information about current config value and
961 961 Mercurial default."""
962 962 opts = pycompat.byteskwargs(opts)
963 963 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
964 964 maxvariantlength = max(len('format-variant'), maxvariantlength)
965 965
966 966 def makeformatname(name):
967 967 return '%s:' + (' ' * (maxvariantlength - len(name)))
968 968
969 969 fm = ui.formatter('debugformat', opts)
970 970 if fm.isplain():
971 971 def formatvalue(value):
972 972 if util.safehasattr(value, 'startswith'):
973 973 return value
974 974 if value:
975 975 return 'yes'
976 976 else:
977 977 return 'no'
978 978 else:
979 979 formatvalue = pycompat.identity
980 980
981 981 fm.plain('format-variant')
982 982 fm.plain(' ' * (maxvariantlength - len('format-variant')))
983 983 fm.plain(' repo')
984 984 if ui.verbose:
985 985 fm.plain(' config default')
986 986 fm.plain('\n')
987 987 for fv in upgrade.allformatvariant:
988 988 fm.startitem()
989 989 repovalue = fv.fromrepo(repo)
990 990 configvalue = fv.fromconfig(repo)
991 991
992 992 if repovalue != configvalue:
993 993 namelabel = 'formatvariant.name.mismatchconfig'
994 994 repolabel = 'formatvariant.repo.mismatchconfig'
995 995 elif repovalue != fv.default:
996 996 namelabel = 'formatvariant.name.mismatchdefault'
997 997 repolabel = 'formatvariant.repo.mismatchdefault'
998 998 else:
999 999 namelabel = 'formatvariant.name.uptodate'
1000 1000 repolabel = 'formatvariant.repo.uptodate'
1001 1001
1002 1002 fm.write('name', makeformatname(fv.name), fv.name,
1003 1003 label=namelabel)
1004 1004 fm.write('repo', ' %3s', formatvalue(repovalue),
1005 1005 label=repolabel)
1006 1006 if fv.default != configvalue:
1007 1007 configlabel = 'formatvariant.config.special'
1008 1008 else:
1009 1009 configlabel = 'formatvariant.config.default'
1010 1010 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1011 1011 label=configlabel)
1012 1012 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1013 1013 label='formatvariant.default')
1014 1014 fm.plain('\n')
1015 1015 fm.end()
1016 1016
1017 1017 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1018 1018 def debugfsinfo(ui, path="."):
1019 1019 """show information detected about current filesystem"""
1020 1020 ui.write(('path: %s\n') % path)
1021 1021 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1022 1022 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1023 1023 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1024 1024 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1025 1025 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1026 1026 casesensitive = '(unknown)'
1027 1027 try:
1028 1028 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1029 1029 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1030 1030 except OSError:
1031 1031 pass
1032 1032 ui.write(('case-sensitive: %s\n') % casesensitive)
1033 1033
1034 1034 @command('debuggetbundle',
1035 1035 [('H', 'head', [], _('id of head node'), _('ID')),
1036 1036 ('C', 'common', [], _('id of common node'), _('ID')),
1037 1037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1038 1038 _('REPO FILE [-H|-C ID]...'),
1039 1039 norepo=True)
1040 1040 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1041 1041 """retrieves a bundle from a repo
1042 1042
1043 1043 Every ID must be a full-length hex node id string. Saves the bundle to the
1044 1044 given file.
1045 1045 """
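# Illustrative usage (hypothetical invocation; <HEAD-ID> stands for a
# full-length hex node id):
#   hg debuggetbundle http://example.org/repo out.hg -H <HEAD-ID> -t gzip
# The -t value maps to the bundle types handled below (none, bzip2, gzip,
# bundle2).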
1046 1046 opts = pycompat.byteskwargs(opts)
1047 1047 repo = hg.peer(ui, opts, repopath)
1048 1048 if not repo.capable('getbundle'):
1049 1049 raise error.Abort("getbundle() not supported by target repository")
1050 1050 args = {}
1051 1051 if common:
1052 1052 args[r'common'] = [bin(s) for s in common]
1053 1053 if head:
1054 1054 args[r'heads'] = [bin(s) for s in head]
1055 1055 # TODO: get desired bundlecaps from command line.
1056 1056 args[r'bundlecaps'] = None
1057 1057 bundle = repo.getbundle('debug', **args)
1058 1058
1059 1059 bundletype = opts.get('type', 'bzip2').lower()
1060 1060 btypes = {'none': 'HG10UN',
1061 1061 'bzip2': 'HG10BZ',
1062 1062 'gzip': 'HG10GZ',
1063 1063 'bundle2': 'HG20'}
1064 1064 bundletype = btypes.get(bundletype)
1065 1065 if bundletype not in bundle2.bundletypes:
1066 1066 raise error.Abort(_('unknown bundle type specified with --type'))
1067 1067 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1068 1068
1069 1069 @command('debugignore', [], '[FILE]')
1070 1070 def debugignore(ui, repo, *files, **opts):
1071 1071 """display the combined ignore pattern and information about ignored files
1072 1072
1073 1073 With no argument display the combined ignore pattern.
1074 1074
1075 1075 Given space separated file names, shows if the given file is ignored and
1076 1076 if so, show the ignore rule (file and line number) that matched it.
1077 1077 """
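# Illustrative usage (hypothetical invocation):
#   hg debugignore           -> print the combined ignore pattern
#   hg debugignore build/x.o -> report whether build/x.o is ignored and, if
#                               so, which ignore file and line matched it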
1078 1078 ignore = repo.dirstate._ignore
1079 1079 if not files:
1080 1080 # Show all the patterns
1081 1081 ui.write("%s\n" % pycompat.byterepr(ignore))
1082 1082 else:
1083 1083 m = scmutil.match(repo[None], pats=files)
1084 1084 for f in m.files():
1085 1085 nf = util.normpath(f)
1086 1086 ignored = None
1087 1087 ignoredata = None
1088 1088 if nf != '.':
1089 1089 if ignore(nf):
1090 1090 ignored = nf
1091 1091 ignoredata = repo.dirstate._ignorefileandline(nf)
1092 1092 else:
1093 1093 for p in util.finddirs(nf):
1094 1094 if ignore(p):
1095 1095 ignored = p
1096 1096 ignoredata = repo.dirstate._ignorefileandline(p)
1097 1097 break
1098 1098 if ignored:
1099 1099 if ignored == nf:
1100 1100 ui.write(_("%s is ignored\n") % m.uipath(f))
1101 1101 else:
1102 1102 ui.write(_("%s is ignored because of "
1103 1103 "containing folder %s\n")
1104 1104 % (m.uipath(f), ignored))
1105 1105 ignorefile, lineno, line = ignoredata
1106 1106 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1107 1107 % (ignorefile, lineno, line))
1108 1108 else:
1109 1109 ui.write(_("%s is not ignored\n") % m.uipath(f))
1110 1110
1111 1111 @command('debugindex', cmdutil.debugrevlogopts +
1112 1112 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1113 1113 _('[-f FORMAT] -c|-m|FILE'),
1114 1114 optionalrepo=True)
1115 1115 def debugindex(ui, repo, file_=None, **opts):
1116 1116 """dump the contents of an index file"""
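# Illustrative usage (hypothetical invocation):
#   hg debugindex -c         -> rev/linkrev/nodeid/p1/p2 columns (format 0)
#   hg debugindex -c -f 1    -> rev/flag/size/link/p1/p2/nodeid columns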
1117 1117 opts = pycompat.byteskwargs(opts)
1118 1118 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1119 1119 format = opts.get('format', 0)
1120 1120 if format not in (0, 1):
1121 1121 raise error.Abort(_("unknown format %d") % format)
1122 1122
1123 1123 if ui.debugflag:
1124 1124 shortfn = hex
1125 1125 else:
1126 1126 shortfn = short
1127 1127
1128 1128 # There might not be anything in r, so have a sane default
1129 1129 idlen = 12
1130 1130 for i in r:
1131 1131 idlen = len(shortfn(r.node(i)))
1132 1132 break
1133 1133
1134 1134 if format == 0:
1135 1135 if ui.verbose:
1136 1136 ui.write((" rev offset length linkrev"
1137 1137 " %s %s p2\n") % ("nodeid".ljust(idlen),
1138 1138 "p1".ljust(idlen)))
1139 1139 else:
1140 1140 ui.write((" rev linkrev %s %s p2\n") % (
1141 1141 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1142 1142 elif format == 1:
1143 1143 if ui.verbose:
1144 1144 ui.write((" rev flag offset length size link p1"
1145 1145 " p2 %s\n") % "nodeid".rjust(idlen))
1146 1146 else:
1147 1147 ui.write((" rev flag size link p1 p2 %s\n") %
1148 1148 "nodeid".rjust(idlen))
1149 1149
1150 1150 for i in r:
1151 1151 node = r.node(i)
1152 1152 if format == 0:
1153 1153 try:
1154 1154 pp = r.parents(node)
1155 1155 except Exception:
1156 1156 pp = [nullid, nullid]
1157 1157 if ui.verbose:
1158 1158 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1159 1159 i, r.start(i), r.length(i), r.linkrev(i),
1160 1160 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1161 1161 else:
1162 1162 ui.write("% 6d % 7d %s %s %s\n" % (
1163 1163 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1164 1164 shortfn(pp[1])))
1165 1165 elif format == 1:
1166 1166 pr = r.parentrevs(i)
1167 1167 if ui.verbose:
1168 1168 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1169 1169 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1170 1170 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1171 1171 else:
1172 1172 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1173 1173 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1174 1174 shortfn(node)))
1175 1175
1176 1176 @command('debugindexdot', cmdutil.debugrevlogopts,
1177 1177 _('-c|-m|FILE'), optionalrepo=True)
1178 1178 def debugindexdot(ui, repo, file_=None, **opts):
1179 1179 """dump an index DAG as a graphviz dot file"""
1180 1180 opts = pycompat.byteskwargs(opts)
1181 1181 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1182 1182 ui.write(("digraph G {\n"))
1183 1183 for i in r:
1184 1184 node = r.node(i)
1185 1185 pp = r.parents(node)
1186 1186 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1187 1187 if pp[1] != nullid:
1188 1188 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1189 1189 ui.write("}\n")
1190 1190
1191 1191 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1192 1192 def debuginstall(ui, **opts):
1193 1193 '''test Mercurial installation
1194 1194
1195 1195 Returns 0 on success.
1196 1196 '''
1197 1197 opts = pycompat.byteskwargs(opts)
1198 1198
1199 1199 def writetemp(contents):
1200 1200 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1201 1201 f = os.fdopen(fd, r"wb")
1202 1202 f.write(contents)
1203 1203 f.close()
1204 1204 return name
1205 1205
1206 1206 problems = 0
1207 1207
1208 1208 fm = ui.formatter('debuginstall', opts)
1209 1209 fm.startitem()
1210 1210
1211 1211 # encoding
1212 1212 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1213 1213 err = None
1214 1214 try:
1215 1215 codecs.lookup(pycompat.sysstr(encoding.encoding))
1216 1216 except LookupError as inst:
1217 1217 err = stringutil.forcebytestr(inst)
1218 1218 problems += 1
1219 1219 fm.condwrite(err, 'encodingerror', _(" %s\n"
1220 1220 " (check that your locale is properly set)\n"), err)
1221 1221
1222 1222 # Python
1223 1223 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1224 1224 pycompat.sysexecutable)
1225 1225 fm.write('pythonver', _("checking Python version (%s)\n"),
1226 1226 ("%d.%d.%d" % sys.version_info[:3]))
1227 1227 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1228 1228 os.path.dirname(pycompat.fsencode(os.__file__)))
1229 1229
1230 1230 security = set(sslutil.supportedprotocols)
1231 1231 if sslutil.hassni:
1232 1232 security.add('sni')
1233 1233
1234 1234 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1235 1235 fm.formatlist(sorted(security), name='protocol',
1236 1236 fmt='%s', sep=','))
1237 1237
1238 1238 # These are warnings, not errors. So don't increment problem count. This
1239 1239 # may change in the future.
1240 1240 if 'tls1.2' not in security:
1241 1241 fm.plain(_(' TLS 1.2 not supported by Python install; '
1242 1242 'network connections lack modern security\n'))
1243 1243 if 'sni' not in security:
1244 1244 fm.plain(_(' SNI not supported by Python install; may have '
1245 1245 'connectivity issues with some servers\n'))
1246 1246
1247 1247 # TODO print CA cert info
1248 1248
1249 1249 # hg version
1250 1250 hgver = util.version()
1251 1251 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1252 1252 hgver.split('+')[0])
1253 1253 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1254 1254 '+'.join(hgver.split('+')[1:]))
1255 1255
1256 1256 # compiled modules
1257 1257 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1258 1258 policy.policy)
1259 1259 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1260 1260 os.path.dirname(pycompat.fsencode(__file__)))
1261 1261
1262 1262 if policy.policy in ('c', 'allow'):
1263 1263 err = None
1264 1264 try:
1265 1265 from .cext import (
1266 1266 base85,
1267 1267 bdiff,
1268 1268 mpatch,
1269 1269 osutil,
1270 1270 )
1271 1271 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1272 1272 except Exception as inst:
1273 1273 err = stringutil.forcebytestr(inst)
1274 1274 problems += 1
1275 1275 fm.condwrite(err, 'extensionserror', " %s\n", err)
1276 1276
1277 1277 compengines = util.compengines._engines.values()
1278 1278 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1279 1279 fm.formatlist(sorted(e.name() for e in compengines),
1280 1280 name='compengine', fmt='%s', sep=', '))
1281 1281 fm.write('compenginesavail', _('checking available compression engines '
1282 1282 '(%s)\n'),
1283 1283 fm.formatlist(sorted(e.name() for e in compengines
1284 1284 if e.available()),
1285 1285 name='compengine', fmt='%s', sep=', '))
1286 1286 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1287 1287 fm.write('compenginesserver', _('checking available compression engines '
1288 1288 'for wire protocol (%s)\n'),
1289 1289 fm.formatlist([e.name() for e in wirecompengines
1290 1290 if e.wireprotosupport()],
1291 1291 name='compengine', fmt='%s', sep=', '))
1292 1292 re2 = 'missing'
1293 1293 if util._re2:
1294 1294 re2 = 'available'
1295 1295 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1296 1296 fm.data(re2=bool(util._re2))
1297 1297
1298 1298 # templates
1299 1299 p = templater.templatepaths()
1300 1300 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1301 1301 fm.condwrite(not p, '', _(" no template directories found\n"))
1302 1302 if p:
1303 1303 m = templater.templatepath("map-cmdline.default")
1304 1304 if m:
1305 1305 # template found, check if it is working
1306 1306 err = None
1307 1307 try:
1308 1308 templater.templater.frommapfile(m)
1309 1309 except Exception as inst:
1310 1310 err = stringutil.forcebytestr(inst)
1311 1311 p = None
1312 1312 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1313 1313 else:
1314 1314 p = None
1315 1315 fm.condwrite(p, 'defaulttemplate',
1316 1316 _("checking default template (%s)\n"), m)
1317 1317 fm.condwrite(not m, 'defaulttemplatenotfound',
1318 1318 _(" template '%s' not found\n"), "default")
1319 1319 if not p:
1320 1320 problems += 1
1321 1321 fm.condwrite(not p, '',
1322 1322 _(" (templates seem to have been installed incorrectly)\n"))
1323 1323
1324 1324 # editor
1325 1325 editor = ui.geteditor()
1326 1326 editor = util.expandpath(editor)
1327 1327 editorbin = procutil.shellsplit(editor)[0]
1328 1328 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1329 1329 cmdpath = procutil.findexe(editorbin)
1330 1330 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1331 1331 _(" No commit editor set and can't find %s in PATH\n"
1332 1332 " (specify a commit editor in your configuration"
1333 1333 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1334 1334 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1335 1335 _(" Can't find editor '%s' in PATH\n"
1336 1336 " (specify a commit editor in your configuration"
1337 1337 " file)\n"), not cmdpath and editorbin)
1338 1338 if not cmdpath and editor != 'vi':
1339 1339 problems += 1
1340 1340
1341 1341 # check username
1342 1342 username = None
1343 1343 err = None
1344 1344 try:
1345 1345 username = ui.username()
1346 1346 except error.Abort as e:
1347 1347 err = stringutil.forcebytestr(e)
1348 1348 problems += 1
1349 1349
1350 1350 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1351 1351 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1352 1352 " (specify a username in your configuration file)\n"), err)
1353 1353
1354 1354 fm.condwrite(not problems, '',
1355 1355 _("no problems detected\n"))
1356 1356 if not problems:
1357 1357 fm.data(problems=problems)
1358 1358 fm.condwrite(problems, 'problems',
1359 1359 _("%d problems detected,"
1360 1360 " please check your install!\n"), problems)
1361 1361 fm.end()
1362 1362
1363 1363 return problems
1364 1364
1365 1365 @command('debugknown', [], _('REPO ID...'), norepo=True)
1366 1366 def debugknown(ui, repopath, *ids, **opts):
1367 1367 """test whether node ids are known to a repo
1368 1368
1369 1369 Every ID must be a full-length hex node id string. Returns a list of 0s
1370 1370 and 1s indicating unknown/known.
1371 1371 """
1372 1372 opts = pycompat.byteskwargs(opts)
1373 1373 repo = hg.peer(ui, opts, repopath)
1374 1374 if not repo.capable('known'):
1375 1375 raise error.Abort("known() not supported by target repository")
1376 1376 flags = repo.known([bin(s) for s in ids])
1377 1377 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1378 1378
1379 1379 @command('debuglabelcomplete', [], _('LABEL...'))
1380 1380 def debuglabelcomplete(ui, repo, *args):
1381 1381 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1382 1382 debugnamecomplete(ui, repo, *args)
1383 1383
1384 1384 @command('debuglocks',
1385 1385 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1386 1386 ('W', 'force-wlock', None,
1387 1387 _('free the working state lock (DANGEROUS)')),
1388 1388 ('s', 'set-lock', None, _('set the store lock until stopped')),
1389 1389 ('S', 'set-wlock', None,
1390 1390 _('set the working state lock until stopped'))],
1391 1391 _('[OPTION]...'))
1392 1392 def debuglocks(ui, repo, **opts):
1393 1393 """show or modify state of locks
1394 1394
1395 1395 By default, this command will show which locks are held. This
1396 1396 includes the user and process holding the lock, the amount of time
1397 1397 the lock has been held, and the machine name where the process is
1398 1398 running if it's not local.
1399 1399
1400 1400 Locks protect the integrity of Mercurial's data, so should be
1401 1401 treated with care. System crashes or other interruptions may cause
1402 1402 locks to not be properly released, though Mercurial will usually
1403 1403 detect and remove such stale locks automatically.
1404 1404
1405 1405 However, detecting stale locks may not always be possible (for
1406 1406 instance, on a shared filesystem). Removing locks may also be
1407 1407 blocked by filesystem permissions.
1408 1408
1409 1409 Setting a lock will prevent other commands from changing the data.
1410 1410 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1411 1411 The set locks are removed when the command exits.
1412 1412
1413 1413 Returns 0 if no locks are held.
1414 1414
1415 1415 """
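# Illustrative usage (hypothetical invocation):
#   hg debuglocks             -> report which locks are held and by whom
#   hg debuglocks --set-wlock -> hold the working state lock until the command
#                                is interrupted (the lock is then released)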
1416 1416
1417 1417 if opts.get(r'force_lock'):
1418 1418 repo.svfs.unlink('lock')
1419 1419 if opts.get(r'force_wlock'):
1420 1420 repo.vfs.unlink('wlock')
1421 1421 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1422 1422 return 0
1423 1423
1424 1424 locks = []
1425 1425 try:
1426 1426 if opts.get(r'set_wlock'):
1427 1427 try:
1428 1428 locks.append(repo.wlock(False))
1429 1429 except error.LockHeld:
1430 1430 raise error.Abort(_('wlock is already held'))
1431 1431 if opts.get(r'set_lock'):
1432 1432 try:
1433 1433 locks.append(repo.lock(False))
1434 1434 except error.LockHeld:
1435 1435 raise error.Abort(_('lock is already held'))
1436 1436 if len(locks):
1437 1437 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1438 1438 return 0
1439 1439 finally:
1440 1440 release(*locks)
1441 1441
1442 1442 now = time.time()
1443 1443 held = 0
1444 1444
1445 1445 def report(vfs, name, method):
1446 1446 # this causes stale locks to get reaped for more accurate reporting
1447 1447 try:
1448 1448 l = method(False)
1449 1449 except error.LockHeld:
1450 1450 l = None
1451 1451
1452 1452 if l:
1453 1453 l.release()
1454 1454 else:
1455 1455 try:
1456 1456 st = vfs.lstat(name)
1457 1457 age = now - st[stat.ST_MTIME]
1458 1458 user = util.username(st.st_uid)
1459 1459 locker = vfs.readlock(name)
1460 1460 if ":" in locker:
1461 1461 host, pid = locker.split(':')
1462 1462 if host == socket.gethostname():
1463 1463 locker = 'user %s, process %s' % (user, pid)
1464 1464 else:
1465 1465 locker = 'user %s, process %s, host %s' \
1466 1466 % (user, pid, host)
1467 1467 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1468 1468 return 1
1469 1469 except OSError as e:
1470 1470 if e.errno != errno.ENOENT:
1471 1471 raise
1472 1472
1473 1473 ui.write(("%-6s free\n") % (name + ":"))
1474 1474 return 0
1475 1475
1476 1476 held += report(repo.svfs, "lock", repo.lock)
1477 1477 held += report(repo.vfs, "wlock", repo.wlock)
1478 1478
1479 1479 return held
1480 1480
1481 1481 @command('debugmanifestfulltextcache', [
1482 1482 ('', 'clear', False, _('clear the cache')),
1483 1483 ('a', 'add', '', _('add the given manifest node to the cache'),
1484 1484 _('NODE'))
1485 1485 ], '')
1486 1486 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1487 1487 """show, clear or amend the contents of the manifest fulltext cache"""
1488 1488 with repo.lock():
1489 1489 r = repo.manifestlog._revlog
1490 1490 try:
1491 1491 cache = r._fulltextcache
1492 1492 except AttributeError:
1493 1493 ui.warn(_(
1494 1494 "Current revlog implementation doesn't appear to have a "
1495 1495 'manifest fulltext cache\n'))
1496 1496 return
1497 1497
1498 1498 if opts.get(r'clear'):
1499 1499 cache.clear()
1500 1500
1501 1501 if add:
1502 1502 try:
1503 1503 manifest = repo.manifestlog[r.lookup(add)]
1504 1504 except error.LookupError as e:
1505 1505 raise error.Abort(e, hint="Check your manifest node id")
1506 1506 manifest.read() # stores revision in cache too
1507 1507
1508 1508 if not len(cache):
1509 1509 ui.write(_('Cache empty\n'))
1510 1510 else:
1511 1511 ui.write(
1512 1512 _('Cache contains %d manifest entries, in order of most to '
1513 1513 'least recent:\n') % (len(cache),))
1514 1514 totalsize = 0
1515 1515 for nodeid in cache:
1516 1516 # Use cache.get to not update the LRU order
1517 1517 data = cache.get(nodeid)
1518 1518 size = len(data)
1519 1519 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1520 1520 ui.write(_('id: %s, size %s\n') % (
1521 1521 hex(nodeid), util.bytecount(size)))
1522 1522 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1523 1523 ui.write(
1524 1524 _('Total cache data size %s, on-disk %s\n') % (
1525 1525 util.bytecount(totalsize), util.bytecount(ondisk))
1526 1526 )
1527 1527
1528 1528 @command('debugmergestate', [], '')
1529 1529 def debugmergestate(ui, repo, *args):
1530 1530 """print merge state
1531 1531
1532 1532 Use --verbose to print out information about whether v1 or v2 merge state
1533 1533 was chosen."""
1534 1534 def _hashornull(h):
1535 1535 if h == nullhex:
1536 1536 return 'null'
1537 1537 else:
1538 1538 return h
1539 1539
1540 1540 def printrecords(version):
1541 1541 ui.write(('* version %d records\n') % version)
1542 1542 if version == 1:
1543 1543 records = v1records
1544 1544 else:
1545 1545 records = v2records
1546 1546
1547 1547 for rtype, record in records:
1548 1548 # pretty print some record types
1549 1549 if rtype == 'L':
1550 1550 ui.write(('local: %s\n') % record)
1551 1551 elif rtype == 'O':
1552 1552 ui.write(('other: %s\n') % record)
1553 1553 elif rtype == 'm':
1554 1554 driver, mdstate = record.split('\0', 1)
1555 1555 ui.write(('merge driver: %s (state "%s")\n')
1556 1556 % (driver, mdstate))
1557 1557 elif rtype in 'FDC':
1558 1558 r = record.split('\0')
1559 1559 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1560 1560 if version == 1:
1561 1561 onode = 'not stored in v1 format'
1562 1562 flags = r[7]
1563 1563 else:
1564 1564 onode, flags = r[7:9]
1565 1565 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1566 1566 % (f, rtype, state, _hashornull(hash)))
1567 1567 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1568 1568 ui.write((' ancestor path: %s (node %s)\n')
1569 1569 % (afile, _hashornull(anode)))
1570 1570 ui.write((' other path: %s (node %s)\n')
1571 1571 % (ofile, _hashornull(onode)))
1572 1572 elif rtype == 'f':
1573 1573 filename, rawextras = record.split('\0', 1)
1574 1574 extras = rawextras.split('\0')
1575 1575 i = 0
1576 1576 extrastrings = []
1577 1577 while i < len(extras):
1578 1578 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1579 1579 i += 2
1580 1580
1581 1581 ui.write(('file extras: %s (%s)\n')
1582 1582 % (filename, ', '.join(extrastrings)))
1583 1583 elif rtype == 'l':
1584 1584 labels = record.split('\0', 2)
1585 1585 labels = [l for l in labels if len(l) > 0]
1586 1586 ui.write(('labels:\n'))
1587 1587 ui.write((' local: %s\n' % labels[0]))
1588 1588 ui.write((' other: %s\n' % labels[1]))
1589 1589 if len(labels) > 2:
1590 1590 ui.write((' base: %s\n' % labels[2]))
1591 1591 else:
1592 1592 ui.write(('unrecognized entry: %s\t%s\n')
1593 1593 % (rtype, record.replace('\0', '\t')))
1594 1594
1595 1595 # Avoid mergestate.read() since it may raise an exception for unsupported
1596 1596 # merge state records. We shouldn't be doing this, but this is OK since this
1597 1597 # command is pretty low-level.
1598 1598 ms = mergemod.mergestate(repo)
1599 1599
1600 1600 # sort so that reasonable information is on top
1601 1601 v1records = ms._readrecordsv1()
1602 1602 v2records = ms._readrecordsv2()
1603 1603 order = 'LOml'
1604 1604 def key(r):
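        # records whose type appears in `order` sort first, by position;
        # unknown record types fall back to sorting by their raw content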
1605 1605 idx = order.find(r[0])
1606 1606 if idx == -1:
1607 1607 return (1, r[1])
1608 1608 else:
1609 1609 return (0, idx)
1610 1610 v1records.sort(key=key)
1611 1611 v2records.sort(key=key)
1612 1612
1613 1613 if not v1records and not v2records:
1614 1614 ui.write(('no merge state found\n'))
1615 1615 elif not v2records:
1616 1616 ui.note(('no version 2 merge state\n'))
1617 1617 printrecords(1)
1618 1618 elif ms._v1v2match(v1records, v2records):
1619 1619 ui.note(('v1 and v2 states match: using v2\n'))
1620 1620 printrecords(2)
1621 1621 else:
1622 1622 ui.note(('v1 and v2 states mismatch: using v1\n'))
1623 1623 printrecords(1)
1624 1624 if ui.verbose:
1625 1625 printrecords(2)
1626 1626
1627 1627 @command('debugnamecomplete', [], _('NAME...'))
1628 1628 def debugnamecomplete(ui, repo, *args):
1629 1629 '''complete "names" - tags, open branch names, bookmark names'''
1630 1630
1631 1631 names = set()
1632 1632 # since we previously only listed open branches, we will handle that
1633 1633 # specially (after this for loop)
1634 1634 for name, ns in repo.names.iteritems():
1635 1635 if name != 'branches':
1636 1636 names.update(ns.listnames(repo))
1637 1637 names.update(tag for (tag, heads, tip, closed)
1638 1638 in repo.branchmap().iterbranches() if not closed)
1639 1639 completions = set()
1640 1640 if not args:
1641 1641 args = ['']
1642 1642 for a in args:
1643 1643 completions.update(n for n in names if n.startswith(a))
1644 1644 ui.write('\n'.join(sorted(completions)))
1645 1645 ui.write('\n')
1646 1646
1647 1647 @command('debugobsolete',
1648 1648 [('', 'flags', 0, _('markers flag')),
1649 1649 ('', 'record-parents', False,
1650 1650 _('record parent information for the precursor')),
1651 1651 ('r', 'rev', [], _('display markers relevant to REV')),
1652 1652 ('', 'exclusive', False, _('restrict display to markers only '
1653 1653 'relevant to REV')),
1654 1654 ('', 'index', False, _('display index of the marker')),
1655 1655 ('', 'delete', [], _('delete markers specified by indices')),
1656 1656 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1657 1657 _('[OBSOLETED [REPLACEMENT ...]]'))
1658 1658 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1659 1659 """create arbitrary obsolete marker
1660 1660
1661 1661 With no arguments, displays the list of obsolescence markers."""
1662 1662
1663 1663 opts = pycompat.byteskwargs(opts)
1664 1664
1665 1665 def parsenodeid(s):
1666 1666 try:
1667 1667 # We do not use revsingle/revrange functions here to accept
1668 1668 # arbitrary node identifiers, possibly not present in the
1669 1669 # local repository.
1670 1670 n = bin(s)
1671 1671 if len(n) != len(nullid):
1672 1672 raise TypeError()
1673 1673 return n
1674 1674 except TypeError:
1675 1675 raise error.Abort('changeset references must be full hexadecimal '
1676 1676 'node identifiers')
1677 1677
1678 1678 if opts.get('delete'):
1679 1679 indices = []
1680 1680 for v in opts.get('delete'):
1681 1681 try:
1682 1682 indices.append(int(v))
1683 1683 except ValueError:
1684 1684 raise error.Abort(_('invalid index value: %r') % v,
1685 1685 hint=_('use integers for indices'))
1686 1686
1687 1687 if repo.currenttransaction():
1688 1688 raise error.Abort(_('cannot delete obsmarkers in the middle '
1689 1689                                 'of a transaction.'))
1690 1690
1691 1691 with repo.lock():
1692 1692 n = repair.deleteobsmarkers(repo.obsstore, indices)
1693 1693 ui.write(_('deleted %i obsolescence markers\n') % n)
1694 1694
1695 1695 return
1696 1696
1697 1697 if precursor is not None:
1698 1698 if opts['rev']:
1699 1699 raise error.Abort('cannot select revision when creating marker')
1700 1700 metadata = {}
1701 1701 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1702 1702 succs = tuple(parsenodeid(succ) for succ in successors)
1703 1703 l = repo.lock()
1704 1704 try:
1705 1705 tr = repo.transaction('debugobsolete')
1706 1706 try:
1707 1707 date = opts.get('date')
1708 1708 if date:
1709 1709 date = dateutil.parsedate(date)
1710 1710 else:
1711 1711 date = None
1712 1712 prec = parsenodeid(precursor)
1713 1713 parents = None
1714 1714 if opts['record_parents']:
1715 1715 if prec not in repo.unfiltered():
1716 1716                     raise error.Abort('cannot use --record-parents on '
1717 1717 'unknown changesets')
1718 1718 parents = repo.unfiltered()[prec].parents()
1719 1719 parents = tuple(p.node() for p in parents)
1720 1720 repo.obsstore.create(tr, prec, succs, opts['flags'],
1721 1721 parents=parents, date=date,
1722 1722 metadata=metadata, ui=ui)
1723 1723 tr.close()
1724 1724 except ValueError as exc:
1725 1725 raise error.Abort(_('bad obsmarker input: %s') %
1726 1726 pycompat.bytestr(exc))
1727 1727 finally:
1728 1728 tr.release()
1729 1729 finally:
1730 1730 l.release()
1731 1731 else:
1732 1732 if opts['rev']:
1733 1733 revs = scmutil.revrange(repo, opts['rev'])
1734 1734 nodes = [repo[r].node() for r in revs]
1735 1735 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1736 1736 exclusive=opts['exclusive']))
1737 1737 markers.sort(key=lambda x: x._data)
1738 1738 else:
1739 1739 markers = obsutil.getmarkers(repo)
1740 1740
1741 1741 markerstoiter = markers
1742 1742 isrelevant = lambda m: True
1743 1743 if opts.get('rev') and opts.get('index'):
1744 1744 markerstoiter = obsutil.getmarkers(repo)
1745 1745 markerset = set(markers)
1746 1746 isrelevant = lambda m: m in markerset
1747 1747
1748 1748 fm = ui.formatter('debugobsolete', opts)
1749 1749 for i, m in enumerate(markerstoiter):
1750 1750 if not isrelevant(m):
1751 1751 # marker can be irrelevant when we're iterating over a set
1752 1752 # of markers (markerstoiter) which is bigger than the set
1753 1753 # of markers we want to display (markers)
1754 1754 # this can happen if both --index and --rev options are
1755 1755 # provided and thus we need to iterate over all of the markers
1756 1756 # to get the correct indices, but only display the ones that
1757 1757 # are relevant to --rev value
1758 1758 continue
1759 1759 fm.startitem()
1760 1760 ind = i if opts.get('index') else None
1761 1761 cmdutil.showmarker(fm, m, index=ind)
1762 1762 fm.end()
1763 1763
1764 1764 @command('debugpathcomplete',
1765 1765 [('f', 'full', None, _('complete an entire path')),
1766 1766 ('n', 'normal', None, _('show only normal files')),
1767 1767 ('a', 'added', None, _('show only added files')),
1768 1768 ('r', 'removed', None, _('show only removed files'))],
1769 1769 _('FILESPEC...'))
1770 1770 def debugpathcomplete(ui, repo, *specs, **opts):
1771 1771 '''complete part or all of a tracked path
1772 1772
1773 1773 This command supports shells that offer path name completion. It
1774 1774 currently completes only files already known to the dirstate.
1775 1775
1776 1776 Completion extends only to the next path segment unless
1777 1777 --full is specified, in which case entire paths are used.'''
1778 1778
1779 1779 def complete(path, acceptable):
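        # Resolve the given spec against the repository root, then scan the
        # dirstate for entries sharing that prefix; unless --full was given,
        # completion stops at the next path separator.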
1780 1780 dirstate = repo.dirstate
1781 1781 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1782 1782 rootdir = repo.root + pycompat.ossep
1783 1783 if spec != repo.root and not spec.startswith(rootdir):
1784 1784 return [], []
1785 1785 if os.path.isdir(spec):
1786 1786 spec += '/'
1787 1787 spec = spec[len(rootdir):]
1788 1788 fixpaths = pycompat.ossep != '/'
1789 1789 if fixpaths:
1790 1790 spec = spec.replace(pycompat.ossep, '/')
1791 1791 speclen = len(spec)
1792 1792 fullpaths = opts[r'full']
1793 1793 files, dirs = set(), set()
1794 1794 adddir, addfile = dirs.add, files.add
1795 1795 for f, st in dirstate.iteritems():
1796 1796 if f.startswith(spec) and st[0] in acceptable:
1797 1797 if fixpaths:
1798 1798 f = f.replace('/', pycompat.ossep)
1799 1799 if fullpaths:
1800 1800 addfile(f)
1801 1801 continue
1802 1802 s = f.find(pycompat.ossep, speclen)
1803 1803 if s >= 0:
1804 1804 adddir(f[:s])
1805 1805 else:
1806 1806 addfile(f)
1807 1807 return files, dirs
1808 1808
1809 1809 acceptable = ''
1810 1810 if opts[r'normal']:
1811 1811 acceptable += 'nm'
1812 1812 if opts[r'added']:
1813 1813 acceptable += 'a'
1814 1814 if opts[r'removed']:
1815 1815 acceptable += 'r'
1816 1816 cwd = repo.getcwd()
1817 1817 if not specs:
1818 1818 specs = ['.']
1819 1819
1820 1820 files, dirs = set(), set()
1821 1821 for spec in specs:
1822 1822 f, d = complete(spec, acceptable or 'nmar')
1823 1823 files.update(f)
1824 1824 dirs.update(d)
1825 1825 files.update(dirs)
1826 1826 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1827 1827 ui.write('\n')
1828 1828
1829 1829 @command('debugpeer', [], _('PATH'), norepo=True)
1830 1830 def debugpeer(ui, path):
1831 1831 """establish a connection to a peer repository"""
1832 1832 # Always enable peer request logging. Requires --debug to display
1833 1833 # though.
1834 1834 overrides = {
1835 1835 ('devel', 'debug.peer-request'): True,
1836 1836 }
1837 1837
1838 1838 with ui.configoverride(overrides):
1839 1839 peer = hg.peer(ui, {}, path)
1840 1840
1841 1841 local = peer.local() is not None
1842 1842 canpush = peer.canpush()
1843 1843
1844 1844 ui.write(_('url: %s\n') % peer.url())
1845 1845 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1846 1846 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1847 1847
1848 1848 @command('debugpickmergetool',
1849 1849 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1850 1850 ('', 'changedelete', None, _('emulate merging change and delete')),
1851 1851 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1852 1852 _('[PATTERN]...'),
1853 1853 inferrepo=True)
1854 1854 def debugpickmergetool(ui, repo, *pats, **opts):
1855 1855     """examine which merge tool is chosen for the specified file
1856 1856
1857 1857     As described in :hg:`help merge-tools`, Mercurial examines the
1858 1858     configurations below, in this order, to decide which merge tool is
1859 1859     chosen for the specified file.
1860 1860
1861 1861 1. ``--tool`` option
1862 1862 2. ``HGMERGE`` environment variable
1863 1863 3. configurations in ``merge-patterns`` section
1864 1864 4. configuration of ``ui.merge``
1865 1865 5. configurations in ``merge-tools`` section
1866 1866     6. ``hgmerge`` tool (for historical reasons only)
1867 1867 7. default tool for fallback (``:merge`` or ``:prompt``)
1868 1868
1869 1869     This command writes out the examination result in the style below::
1870 1870
1871 1871 FILE = MERGETOOL
1872 1872
1873 1873 By default, all files known in the first parent context of the
1874 1874 working directory are examined. Use file patterns and/or -I/-X
1875 1875 options to limit target files. -r/--rev is also useful to examine
1876 1876     files in another context without actually updating to it.
1877 1877
1878 1878     With --debug, this command also shows warning messages emitted
1879 1879     while matching against ``merge-patterns`` and so on. It is
1880 1880     recommended to use this option with explicit file patterns and/or
1881 1881     -I/-X options, because this option increases the amount of output
1882 1882     per file according to configurations in hgrc.
1883 1883
1884 1884     With -v/--verbose, this command first shows the configurations
1885 1885     below (only if specified).
1886 1886
1887 1887 - ``--tool`` option
1888 1888 - ``HGMERGE`` environment variable
1889 1889 - configuration of ``ui.merge``
1890 1890
1891 1891     If a merge tool is chosen before matching against
1892 1892     ``merge-patterns``, this command can't show any helpful
1893 1893     information, even with --debug. In such cases, the information
1894 1894     above is useful for understanding why a merge tool was chosen.
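
    For instance, a hypothetical run over a single tracked C file might
    report the tool resolved from the configuration above::

      foo.c = :merge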
1895 1895 """
1896 1896 opts = pycompat.byteskwargs(opts)
1897 1897 overrides = {}
1898 1898 if opts['tool']:
1899 1899 overrides[('ui', 'forcemerge')] = opts['tool']
1900 1900 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1901 1901
1902 1902 with ui.configoverride(overrides, 'debugmergepatterns'):
1903 1903 hgmerge = encoding.environ.get("HGMERGE")
1904 1904 if hgmerge is not None:
1905 1905 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1906 1906 uimerge = ui.config("ui", "merge")
1907 1907 if uimerge:
1908 1908 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1909 1909
1910 1910 ctx = scmutil.revsingle(repo, opts.get('rev'))
1911 1911 m = scmutil.match(ctx, pats, opts)
1912 1912 changedelete = opts['changedelete']
1913 1913 for path in ctx.walk(m):
1914 1914 fctx = ctx[path]
1915 1915 try:
1916 1916 if not ui.debugflag:
1917 1917 ui.pushbuffer(error=True)
1918 1918 tool, toolpath = filemerge._picktool(repo, ui, path,
1919 1919 fctx.isbinary(),
1920 1920 'l' in fctx.flags(),
1921 1921 changedelete)
1922 1922 finally:
1923 1923 if not ui.debugflag:
1924 1924 ui.popbuffer()
1925 1925 ui.write(('%s = %s\n') % (path, tool))
1926 1926
1927 1927 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1928 1928 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1929 1929 '''access the pushkey key/value protocol
1930 1930
1931 1931 With two args, list the keys in the given namespace.
1932 1932
1933 1933 With five args, set a key to new if it currently is set to old.
1934 1934 Reports success or failure.
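
    For example, listing the bookmarks of a hypothetical remote repository::

      hg debugpushkey https://example.com/repo bookmarks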
1935 1935 '''
1936 1936
1937 1937 target = hg.peer(ui, {}, repopath)
1938 1938 if keyinfo:
1939 1939 key, old, new = keyinfo
1940 1940 with target.commandexecutor() as e:
1941 1941 r = e.callcommand('pushkey', {
1942 1942 'namespace': namespace,
1943 1943 'key': key,
1944 1944 'old': old,
1945 1945 'new': new,
1946 1946 }).result()
1947 1947
1948 1948 ui.status(pycompat.bytestr(r) + '\n')
1949 1949 return not r
1950 1950 else:
1951 1951 for k, v in sorted(target.listkeys(namespace).iteritems()):
1952 1952 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1953 1953 stringutil.escapestr(v)))
1954 1954
1955 1955 @command('debugpvec', [], _('A B'))
1956 1956 def debugpvec(ui, repo, a, b=None):
1957 1957 ca = scmutil.revsingle(repo, a)
1958 1958 cb = scmutil.revsingle(repo, b)
1959 1959 pa = pvec.ctxpvec(ca)
1960 1960 pb = pvec.ctxpvec(cb)
1961 1961 if pa == pb:
1962 1962 rel = "="
1963 1963 elif pa > pb:
1964 1964 rel = ">"
1965 1965 elif pa < pb:
1966 1966 rel = "<"
1967 1967 elif pa | pb:
1968 1968 rel = "|"
1969 1969 ui.write(_("a: %s\n") % pa)
1970 1970 ui.write(_("b: %s\n") % pb)
1971 1971 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1972 1972 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1973 1973 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1974 1974 pa.distance(pb), rel))
1975 1975
1976 1976 @command('debugrebuilddirstate|debugrebuildstate',
1977 1977 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1978 1978 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1979 1979 'the working copy parent')),
1980 1980 ],
1981 1981 _('[-r REV]'))
1982 1982 def debugrebuilddirstate(ui, repo, rev, **opts):
1983 1983 """rebuild the dirstate as it would look like for the given revision
1984 1984
1985 1985     If no revision is specified, the first current parent will be used.
1986 1986
1987 1987 The dirstate will be set to the files of the given revision.
1988 1988 The actual working directory content or existing dirstate
1989 1989 information such as adds or removes is not considered.
1990 1990
1991 1991 ``minimal`` will only rebuild the dirstate status for files that claim to be
1992 1992 tracked but are not in the parent manifest, or that exist in the parent
1993 1993 manifest but are not in the dirstate. It will not change adds, removes, or
1994 1994 modified files that are in the working copy parent.
1995 1995
1996 1996 One use of this command is to make the next :hg:`status` invocation
1997 1997 check the actual file content.
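
    For instance, a hypothetical rebuild against the current working
    directory parent::

      hg debugrebuilddirstate -r .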
1998 1998 """
1999 1999 ctx = scmutil.revsingle(repo, rev)
2000 2000 with repo.wlock():
2001 2001 dirstate = repo.dirstate
2002 2002 changedfiles = None
2003 2003 # See command doc for what minimal does.
2004 2004 if opts.get(r'minimal'):
2005 2005 manifestfiles = set(ctx.manifest().keys())
2006 2006 dirstatefiles = set(dirstate)
2007 2007 manifestonly = manifestfiles - dirstatefiles
2008 2008 dsonly = dirstatefiles - manifestfiles
2009 2009 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2010 2010 changedfiles = manifestonly | dsnotadded
2011 2011
2012 2012 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2013 2013
2014 2014 @command('debugrebuildfncache', [], '')
2015 2015 def debugrebuildfncache(ui, repo):
2016 2016 """rebuild the fncache file"""
2017 2017 repair.rebuildfncache(ui, repo)
2018 2018
2019 2019 @command('debugrename',
2020 2020 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2021 2021 _('[-r REV] FILE'))
2022 2022 def debugrename(ui, repo, file1, *pats, **opts):
2023 2023 """dump rename information"""
2024 2024
2025 2025 opts = pycompat.byteskwargs(opts)
2026 2026 ctx = scmutil.revsingle(repo, opts.get('rev'))
2027 2027 m = scmutil.match(ctx, (file1,) + pats, opts)
2028 2028 for abs in ctx.walk(m):
2029 2029 fctx = ctx[abs]
2030 2030 o = fctx.filelog().renamed(fctx.filenode())
2031 2031 rel = m.rel(abs)
2032 2032 if o:
2033 2033 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2034 2034 else:
2035 2035 ui.write(_("%s not renamed\n") % rel)
2036 2036
2037 2037 @command('debugrevlog', cmdutil.debugrevlogopts +
2038 2038 [('d', 'dump', False, _('dump index data'))],
2039 2039 _('-c|-m|FILE'),
2040 2040 optionalrepo=True)
2041 2041 def debugrevlog(ui, repo, file_=None, **opts):
2042 2042 """show data and statistics about a revlog"""
2043 2043 opts = pycompat.byteskwargs(opts)
2044 2044 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2045 2045
2046 2046 if opts.get("dump"):
2047 2047 numrevs = len(r)
2048 2048 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2049 2049 " rawsize totalsize compression heads chainlen\n"))
2050 2050 ts = 0
2051 2051 heads = set()
2052 2052
2053 2053 for rev in pycompat.xrange(numrevs):
2054 2054 dbase = r.deltaparent(rev)
2055 2055 if dbase == -1:
2056 2056 dbase = rev
2057 2057 cbase = r.chainbase(rev)
2058 2058 clen = r.chainlen(rev)
2059 2059 p1, p2 = r.parentrevs(rev)
2060 2060 rs = r.rawsize(rev)
2061 2061 ts = ts + rs
2062 2062 heads -= set(r.parentrevs(rev))
2063 2063 heads.add(rev)
2064 2064 try:
2065 2065 compression = ts / r.end(rev)
2066 2066 except ZeroDivisionError:
2067 2067 compression = 0
2068 2068 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2069 2069 "%11d %5d %8d\n" %
2070 2070 (rev, p1, p2, r.start(rev), r.end(rev),
2071 2071 r.start(dbase), r.start(cbase),
2072 2072 r.start(p1), r.start(p2),
2073 2073 rs, ts, compression, len(heads), clen))
2074 2074 return 0
2075 2075
2076 2076 v = r.version
2077 2077 format = v & 0xFFFF
2078 2078 flags = []
2079 2079 gdelta = False
2080 2080 if v & revlog.FLAG_INLINE_DATA:
2081 2081 flags.append('inline')
2082 2082 if v & revlog.FLAG_GENERALDELTA:
2083 2083 gdelta = True
2084 2084 flags.append('generaldelta')
2085 2085 if not flags:
2086 2086 flags = ['(none)']
2087 2087
2088 2088 ### tracks merge vs single parent
2089 2089 nummerges = 0
2090 2090
2091 2091     ### tracks how each "delta" is built
2092 2092 # nodelta
2093 2093 numempty = 0
2094 2094 numemptytext = 0
2095 2095 numemptydelta = 0
2096 2096 # full file content
2097 2097 numfull = 0
2098 2098 # delta against previous revision
2099 2099 numprev = 0
2100 2100 # delta against first or second parent (not prev)
2101 2101 nump1 = 0
2102 2102 nump2 = 0
2103 2103 # delta against neither prev nor parents
2104 2104 numother = 0
2105 2105 # delta against prev that are also first or second parent
2106 2106 # (details of `numprev`)
2107 2107 nump1prev = 0
2108 2108 nump2prev = 0
2109 2109
2110 2110     # data about the delta chain of each rev
2111 2111 chainlengths = []
2112 2112 chainbases = []
2113 2113 chainspans = []
2114 2114
2115 2115 # data about each revision
2116 2116 datasize = [None, 0, 0]
2117 2117 fullsize = [None, 0, 0]
2118 2118 deltasize = [None, 0, 0]
2119 2119 chunktypecounts = {}
2120 2120 chunktypesizes = {}
2121 2121
2122 2122 def addsize(size, l):
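        # l is a [min, max, total] accumulator; keep all three up to date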
2123 2123 if l[0] is None or size < l[0]:
2124 2124 l[0] = size
2125 2125 if size > l[1]:
2126 2126 l[1] = size
2127 2127 l[2] += size
2128 2128
2129 2129 numrevs = len(r)
2130 2130 for rev in pycompat.xrange(numrevs):
2131 2131 p1, p2 = r.parentrevs(rev)
2132 2132 delta = r.deltaparent(rev)
2133 2133 if format > 0:
2134 2134 addsize(r.rawsize(rev), datasize)
2135 2135 if p2 != nullrev:
2136 2136 nummerges += 1
2137 2137 size = r.length(rev)
2138 2138 if delta == nullrev:
2139 2139 chainlengths.append(0)
2140 2140 chainbases.append(r.start(rev))
2141 2141 chainspans.append(size)
2142 2142 if size == 0:
2143 2143 numempty += 1
2144 2144 numemptytext += 1
2145 2145 else:
2146 2146 numfull += 1
2147 2147 addsize(size, fullsize)
2148 2148 else:
2149 2149 chainlengths.append(chainlengths[delta] + 1)
2150 2150 baseaddr = chainbases[delta]
2151 2151 revaddr = r.start(rev)
2152 2152 chainbases.append(baseaddr)
2153 2153 chainspans.append((revaddr - baseaddr) + size)
2154 2154 if size == 0:
2155 2155 numempty += 1
2156 2156 numemptydelta += 1
2157 2157 else:
2158 2158 addsize(size, deltasize)
2159 2159 if delta == rev - 1:
2160 2160 numprev += 1
2161 2161 if delta == p1:
2162 2162 nump1prev += 1
2163 2163 elif delta == p2:
2164 2164 nump2prev += 1
2165 2165 elif delta == p1:
2166 2166 nump1 += 1
2167 2167 elif delta == p2:
2168 2168 nump2 += 1
2169 2169 elif delta != nullrev:
2170 2170 numother += 1
2171 2171
2172 2172 # Obtain data on the raw chunks in the revlog.
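        # The first byte of a non-empty segment identifies how the chunk is
        # stored on disk (e.g. 'u' for uncompressed data, 'x' for zlib), so
        # counts and sizes are bucketed by that byte below.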
2173 if util.safehasattr(r, '_getsegmentforrevs'):
2173 2174 segment = r._getsegmentforrevs(rev, rev)[1]
2175 else:
2176 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2174 2177 if segment:
2175 2178 chunktype = bytes(segment[0:1])
2176 2179 else:
2177 2180 chunktype = 'empty'
2178 2181
2179 2182 if chunktype not in chunktypecounts:
2180 2183 chunktypecounts[chunktype] = 0
2181 2184 chunktypesizes[chunktype] = 0
2182 2185
2183 2186 chunktypecounts[chunktype] += 1
2184 2187 chunktypesizes[chunktype] += size
2185 2188
2186 2189 # Adjust size min value for empty cases
2187 2190 for size in (datasize, fullsize, deltasize):
2188 2191 if size[0] is None:
2189 2192 size[0] = 0
2190 2193
2191 2194 numdeltas = numrevs - numfull - numempty
2192 2195 numoprev = numprev - nump1prev - nump2prev
2193 2196 totalrawsize = datasize[2]
2194 2197 datasize[2] /= numrevs
2195 2198 fulltotal = fullsize[2]
2196 2199 fullsize[2] /= numfull
2197 2200 deltatotal = deltasize[2]
2198 2201 if numdeltas > 0:
2199 2202 deltasize[2] /= numdeltas
2200 2203 totalsize = fulltotal + deltatotal
2201 2204 avgchainlen = sum(chainlengths) / numrevs
2202 2205 maxchainlen = max(chainlengths)
2203 2206 maxchainspan = max(chainspans)
2204 2207 compratio = 1
2205 2208 if totalsize:
2206 2209 compratio = totalrawsize / totalsize
2207 2210
2208 2211 basedfmtstr = '%%%dd\n'
2209 2212 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2210 2213
2211 2214 def dfmtstr(max):
2212 2215 return basedfmtstr % len(str(max))
2213 2216 def pcfmtstr(max, padding=0):
2214 2217 return basepcfmtstr % (len(str(max)), ' ' * padding)
2215 2218
2216 2219 def pcfmt(value, total):
2217 2220 if total:
2218 2221 return (value, 100 * float(value) / total)
2219 2222 else:
2220 2223 return value, 100.0
2221 2224
2222 2225 ui.write(('format : %d\n') % format)
2223 2226 ui.write(('flags : %s\n') % ', '.join(flags))
2224 2227
2225 2228 ui.write('\n')
2226 2229 fmt = pcfmtstr(totalsize)
2227 2230 fmt2 = dfmtstr(totalsize)
2228 2231 ui.write(('revisions : ') + fmt2 % numrevs)
2229 2232 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2230 2233 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2231 2234 ui.write(('revisions : ') + fmt2 % numrevs)
2232 2235 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2233 2236 ui.write((' text : ')
2234 2237 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2235 2238 ui.write((' delta : ')
2236 2239 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2237 2240 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2238 2241 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2239 2242 ui.write(('revision size : ') + fmt2 % totalsize)
2240 2243 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2241 2244 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2242 2245
2243 2246 def fmtchunktype(chunktype):
2244 2247 if chunktype == 'empty':
2245 2248 return ' %s : ' % chunktype
2246 2249 elif chunktype in pycompat.bytestr(string.ascii_letters):
2247 2250 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2248 2251 else:
2249 2252 return ' 0x%s : ' % hex(chunktype)
2250 2253
2251 2254 ui.write('\n')
2252 2255 ui.write(('chunks : ') + fmt2 % numrevs)
2253 2256 for chunktype in sorted(chunktypecounts):
2254 2257 ui.write(fmtchunktype(chunktype))
2255 2258 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2256 2259 ui.write(('chunks size : ') + fmt2 % totalsize)
2257 2260 for chunktype in sorted(chunktypecounts):
2258 2261 ui.write(fmtchunktype(chunktype))
2259 2262 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2260 2263
2261 2264 ui.write('\n')
2262 2265 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2263 2266 ui.write(('avg chain length : ') + fmt % avgchainlen)
2264 2267 ui.write(('max chain length : ') + fmt % maxchainlen)
2265 2268 ui.write(('max chain reach : ') + fmt % maxchainspan)
2266 2269 ui.write(('compression ratio : ') + fmt % compratio)
2267 2270
2268 2271 if format > 0:
2269 2272 ui.write('\n')
2270 2273 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2271 2274 % tuple(datasize))
2272 2275 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2273 2276 % tuple(fullsize))
2274 2277 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2275 2278 % tuple(deltasize))
2276 2279
2277 2280 if numdeltas > 0:
2278 2281 ui.write('\n')
2279 2282 fmt = pcfmtstr(numdeltas)
2280 2283 fmt2 = pcfmtstr(numdeltas, 4)
2281 2284 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2282 2285 if numprev > 0:
2283 2286 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2284 2287 numprev))
2285 2288 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2286 2289 numprev))
2287 2290 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2288 2291 numprev))
2289 2292 if gdelta:
2290 2293 ui.write(('deltas against p1 : ')
2291 2294 + fmt % pcfmt(nump1, numdeltas))
2292 2295 ui.write(('deltas against p2 : ')
2293 2296 + fmt % pcfmt(nump2, numdeltas))
2294 2297 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2295 2298 numdeltas))
2296 2299
2297 2300 @command('debugrevspec',
2298 2301 [('', 'optimize', None,
2299 2302 _('print parsed tree after optimizing (DEPRECATED)')),
2300 2303 ('', 'show-revs', True, _('print list of result revisions (default)')),
2301 2304 ('s', 'show-set', None, _('print internal representation of result set')),
2302 2305 ('p', 'show-stage', [],
2303 2306 _('print parsed tree at the given stage'), _('NAME')),
2304 2307 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2305 2308 ('', 'verify-optimized', False, _('verify optimized result')),
2306 2309 ],
2307 2310 ('REVSPEC'))
2308 2311 def debugrevspec(ui, repo, expr, **opts):
2309 2312 """parse and apply a revision specification
2310 2313
2311 2314 Use -p/--show-stage option to print the parsed tree at the given stages.
2312 2315 Use -p all to print tree at every stage.
2313 2316
2314 2317 Use --no-show-revs option with -s or -p to print only the set
2315 2318 representation or the parsed tree respectively.
2316 2319
2317 2320 Use --verify-optimized to compare the optimized result with the unoptimized
2318 2321 one. Returns 1 if the optimized result differs.
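
    For example, a hypothetical invocation printing the tree at every parse
    stage for a simple expression::

      hg debugrevspec -p all '::tip'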
2319 2322 """
2320 2323 opts = pycompat.byteskwargs(opts)
2321 2324 aliases = ui.configitems('revsetalias')
2322 2325 stages = [
2323 2326 ('parsed', lambda tree: tree),
2324 2327 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2325 2328 ui.warn)),
2326 2329 ('concatenated', revsetlang.foldconcat),
2327 2330 ('analyzed', revsetlang.analyze),
2328 2331 ('optimized', revsetlang.optimize),
2329 2332 ]
2330 2333 if opts['no_optimized']:
2331 2334 stages = stages[:-1]
2332 2335 if opts['verify_optimized'] and opts['no_optimized']:
2333 2336 raise error.Abort(_('cannot use --verify-optimized with '
2334 2337 '--no-optimized'))
2335 2338 stagenames = set(n for n, f in stages)
2336 2339
2337 2340 showalways = set()
2338 2341 showchanged = set()
2339 2342 if ui.verbose and not opts['show_stage']:
2340 2343 # show parsed tree by --verbose (deprecated)
2341 2344 showalways.add('parsed')
2342 2345 showchanged.update(['expanded', 'concatenated'])
2343 2346 if opts['optimize']:
2344 2347 showalways.add('optimized')
2345 2348 if opts['show_stage'] and opts['optimize']:
2346 2349 raise error.Abort(_('cannot use --optimize with --show-stage'))
2347 2350 if opts['show_stage'] == ['all']:
2348 2351 showalways.update(stagenames)
2349 2352 else:
2350 2353 for n in opts['show_stage']:
2351 2354 if n not in stagenames:
2352 2355 raise error.Abort(_('invalid stage name: %s') % n)
2353 2356 showalways.update(opts['show_stage'])
2354 2357
2355 2358 treebystage = {}
2356 2359 printedtree = None
2357 2360 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2358 2361 for n, f in stages:
2359 2362 treebystage[n] = tree = f(tree)
2360 2363 if n in showalways or (n in showchanged and tree != printedtree):
2361 2364 if opts['show_stage'] or n != 'parsed':
2362 2365 ui.write(("* %s:\n") % n)
2363 2366 ui.write(revsetlang.prettyformat(tree), "\n")
2364 2367 printedtree = tree
2365 2368
2366 2369 if opts['verify_optimized']:
2367 2370 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2368 2371 brevs = revset.makematcher(treebystage['optimized'])(repo)
2369 2372 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2370 2373 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2371 2374 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2372 2375 arevs = list(arevs)
2373 2376 brevs = list(brevs)
2374 2377 if arevs == brevs:
2375 2378 return 0
2376 2379 ui.write(('--- analyzed\n'), label='diff.file_a')
2377 2380 ui.write(('+++ optimized\n'), label='diff.file_b')
2378 2381 sm = difflib.SequenceMatcher(None, arevs, brevs)
2379 2382 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2380 2383 if tag in ('delete', 'replace'):
2381 2384 for c in arevs[alo:ahi]:
2382 2385 ui.write('-%s\n' % c, label='diff.deleted')
2383 2386 if tag in ('insert', 'replace'):
2384 2387 for c in brevs[blo:bhi]:
2385 2388 ui.write('+%s\n' % c, label='diff.inserted')
2386 2389 if tag == 'equal':
2387 2390 for c in arevs[alo:ahi]:
2388 2391 ui.write(' %s\n' % c)
2389 2392 return 1
2390 2393
2391 2394 func = revset.makematcher(tree)
2392 2395 revs = func(repo)
2393 2396 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2394 2397 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2395 2398 if not opts['show_revs']:
2396 2399 return
2397 2400 for c in revs:
2398 2401 ui.write("%d\n" % c)
2399 2402
2400 2403 @command('debugserve', [
2401 2404 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2402 2405 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2403 2406 ('', 'logiofile', '', _('file to log server I/O to')),
2404 2407 ], '')
2405 2408 def debugserve(ui, repo, **opts):
2406 2409 """run a server with advanced settings
2407 2410
2408 2411 This command is similar to :hg:`serve`. It exists partially as a
2409 2412     workaround for the fact that ``hg serve --stdio`` must have specific
2410 2413 arguments for security reasons.
2411 2414 """
2412 2415 opts = pycompat.byteskwargs(opts)
2413 2416
2414 2417 if not opts['sshstdio']:
2415 2418 raise error.Abort(_('only --sshstdio is currently supported'))
2416 2419
2417 2420 logfh = None
2418 2421
2419 2422 if opts['logiofd'] and opts['logiofile']:
2420 2423 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2421 2424
2422 2425 if opts['logiofd']:
2423 2426 # Line buffered because output is line based.
2424 2427 try:
2425 2428 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2426 2429 except OSError as e:
2427 2430 if e.errno != errno.ESPIPE:
2428 2431 raise
2429 2432 # can't seek a pipe, so `ab` mode fails on py3
2430 2433 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2431 2434 elif opts['logiofile']:
2432 2435 logfh = open(opts['logiofile'], 'ab', 1)
2433 2436
2434 2437 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2435 2438 s.serve_forever()
2436 2439
2437 2440 @command('debugsetparents', [], _('REV1 [REV2]'))
2438 2441 def debugsetparents(ui, repo, rev1, rev2=None):
2439 2442 """manually set the parents of the current working directory
2440 2443
2441 2444 This is useful for writing repository conversion tools, but should
2442 2445 be used with care. For example, neither the working directory nor the
2443 2446 dirstate is updated, so file status may be incorrect after running this
2444 2447 command.
2445 2448
2446 2449 Returns 0 on success.
2447 2450 """
2448 2451
2449 2452 node1 = scmutil.revsingle(repo, rev1).node()
2450 2453 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2451 2454
2452 2455 with repo.wlock():
2453 2456 repo.setparents(node1, node2)
2454 2457
2455 2458 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2456 2459 def debugssl(ui, repo, source=None, **opts):
2457 2460 '''test a secure connection to a server
2458 2461
2459 2462 This builds the certificate chain for the server on Windows, installing the
2460 2463 missing intermediates and trusted root via Windows Update if necessary. It
2461 2464 does nothing on other platforms.
2462 2465
2463 2466 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2464 2467 that server is used. See :hg:`help urls` for more information.
2465 2468
2466 2469 If the update succeeds, retry the original operation. Otherwise, the cause
2467 2470 of the SSL error is likely another issue.
2468 2471 '''
2469 2472 if not pycompat.iswindows:
2470 2473 raise error.Abort(_('certificate chain building is only possible on '
2471 2474 'Windows'))
2472 2475
2473 2476 if not source:
2474 2477 if not repo:
2475 2478 raise error.Abort(_("there is no Mercurial repository here, and no "
2476 2479 "server specified"))
2477 2480 source = "default"
2478 2481
2479 2482 source, branches = hg.parseurl(ui.expandpath(source))
2480 2483 url = util.url(source)
2481 2484 addr = None
2482 2485
2483 2486 defaultport = {'https': 443, 'ssh': 22}
2484 2487 if url.scheme in defaultport:
2485 2488 try:
2486 2489 addr = (url.host, int(url.port or defaultport[url.scheme]))
2487 2490 except ValueError:
2488 2491 raise error.Abort(_("malformed port number in URL"))
2489 2492 else:
2490 2493 raise error.Abort(_("only https and ssh connections are supported"))
2491 2494
2492 2495 from . import win32
2493 2496
2494 2497 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2495 2498 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2496 2499
2497 2500 try:
2498 2501 s.connect(addr)
2499 2502 cert = s.getpeercert(True)
2500 2503
2501 2504 ui.status(_('checking the certificate chain for %s\n') % url.host)
2502 2505
2503 2506 complete = win32.checkcertificatechain(cert, build=False)
2504 2507
2505 2508 if not complete:
2506 2509 ui.status(_('certificate chain is incomplete, updating... '))
2507 2510
2508 2511 if not win32.checkcertificatechain(cert):
2509 2512 ui.status(_('failed.\n'))
2510 2513 else:
2511 2514 ui.status(_('done.\n'))
2512 2515 else:
2513 2516 ui.status(_('full certificate chain is available\n'))
2514 2517 finally:
2515 2518 s.close()
2516 2519
2517 2520 @command('debugsub',
2518 2521 [('r', 'rev', '',
2519 2522 _('revision to check'), _('REV'))],
2520 2523 _('[-r REV] [REV]'))
2521 2524 def debugsub(ui, repo, rev=None):
2522 2525 ctx = scmutil.revsingle(repo, rev, None)
2523 2526 for k, v in sorted(ctx.substate.items()):
2524 2527 ui.write(('path %s\n') % k)
2525 2528 ui.write((' source %s\n') % v[0])
2526 2529 ui.write((' revision %s\n') % v[1])
2527 2530
2528 2531 @command('debugsuccessorssets',
2529 2532 [('', 'closest', False, _('return closest successors sets only'))],
2530 2533 _('[REV]'))
2531 2534 def debugsuccessorssets(ui, repo, *revs, **opts):
2532 2535 """show set of successors for revision
2533 2536
2534 2537     A successors set of a changeset A is a consistent group of revisions
2535 2538     that succeed A. It contains non-obsolete changesets only unless the
2536 2539     closest successors set option is set.
2537 2540
2538 2541 In most cases a changeset A has a single successors set containing a single
2539 2542 successor (changeset A replaced by A').
2540 2543
2541 2544     A changeset that is made obsolete with no successors is called "pruned".
2542 2545 Such changesets have no successors sets at all.
2543 2546
2544 2547 A changeset that has been "split" will have a successors set containing
2545 2548 more than one successor.
2546 2549
2547 2550 A changeset that has been rewritten in multiple different ways is called
2548 2551 "divergent". Such changesets have multiple successor sets (each of which
2549 2552 may also be split, i.e. have multiple successors).
2550 2553
2551 2554 Results are displayed as follows::
2552 2555
2553 2556 <rev1>
2554 2557 <successors-1A>
2555 2558 <rev2>
2556 2559 <successors-2A>
2557 2560 <successors-2B1> <successors-2B2> <successors-2B3>
2558 2561
2559 2562 Here rev2 has two possible (i.e. divergent) successors sets. The first
2560 2563 holds one element, whereas the second holds three (i.e. the changeset has
2561 2564 been split).
2562 2565 """
2563 2566 # passed to successorssets caching computation from one call to another
2564 2567 cache = {}
2565 2568 ctx2str = bytes
2566 2569 node2str = short
2567 2570 for rev in scmutil.revrange(repo, revs):
2568 2571 ctx = repo[rev]
2569 2572 ui.write('%s\n'% ctx2str(ctx))
2570 2573 for succsset in obsutil.successorssets(repo, ctx.node(),
2571 2574 closest=opts[r'closest'],
2572 2575 cache=cache):
2573 2576 if succsset:
2574 2577 ui.write(' ')
2575 2578 ui.write(node2str(succsset[0]))
2576 2579 for node in succsset[1:]:
2577 2580 ui.write(' ')
2578 2581 ui.write(node2str(node))
2579 2582 ui.write('\n')
2580 2583
2581 2584 @command('debugtemplate',
2582 2585 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2583 2586 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2584 2587 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2585 2588 optionalrepo=True)
2586 2589 def debugtemplate(ui, repo, tmpl, **opts):
2587 2590 """parse and apply a template
2588 2591
2589 2592 If -r/--rev is given, the template is processed as a log template and
2590 2593 applied to the given changesets. Otherwise, it is processed as a generic
2591 2594 template.
2592 2595
2593 2596 Use --verbose to print the parsed tree.
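
    For example, a minimal invocation applying a log template to the working
    directory parent might look like::

      hg debugtemplate -r . '{rev}:{node|short}\n'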
2594 2597 """
2595 2598 revs = None
2596 2599 if opts[r'rev']:
2597 2600 if repo is None:
2598 2601 raise error.RepoError(_('there is no Mercurial repository here '
2599 2602 '(.hg not found)'))
2600 2603 revs = scmutil.revrange(repo, opts[r'rev'])
2601 2604
2602 2605 props = {}
2603 2606 for d in opts[r'define']:
2604 2607 try:
2605 2608 k, v = (e.strip() for e in d.split('=', 1))
2606 2609 if not k or k == 'ui':
2607 2610 raise ValueError
2608 2611 props[k] = v
2609 2612 except ValueError:
2610 2613 raise error.Abort(_('malformed keyword definition: %s') % d)
2611 2614
2612 2615 if ui.verbose:
2613 2616 aliases = ui.configitems('templatealias')
2614 2617 tree = templater.parse(tmpl)
2615 2618 ui.note(templater.prettyformat(tree), '\n')
2616 2619 newtree = templater.expandaliases(tree, aliases)
2617 2620 if newtree != tree:
2618 2621 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2619 2622
2620 2623 if revs is None:
2621 2624 tres = formatter.templateresources(ui, repo)
2622 2625 t = formatter.maketemplater(ui, tmpl, resources=tres)
2623 2626 if ui.verbose:
2624 2627 kwds, funcs = t.symbolsuseddefault()
2625 2628 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2626 2629 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2627 2630 ui.write(t.renderdefault(props))
2628 2631 else:
2629 2632 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2630 2633 if ui.verbose:
2631 2634 kwds, funcs = displayer.t.symbolsuseddefault()
2632 2635 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2633 2636 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2634 2637 for r in revs:
2635 2638 displayer.show(repo[r], **pycompat.strkwargs(props))
2636 2639 displayer.close()
2637 2640
2638 2641 @command('debuguigetpass', [
2639 2642 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2640 2643 ], _('[-p TEXT]'), norepo=True)
2641 2644 def debuguigetpass(ui, prompt=''):
2642 2645 """show prompt to type password"""
2643 2646 r = ui.getpass(prompt)
2644 2647     ui.write(('response: %s\n') % r)
2645 2648
2646 2649 @command('debuguiprompt', [
2647 2650 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2648 2651 ], _('[-p TEXT]'), norepo=True)
2649 2652 def debuguiprompt(ui, prompt=''):
2650 2653 """show plain prompt"""
2651 2654 r = ui.prompt(prompt)
2652 2655 ui.write(('response: %s\n') % r)
2653 2656
2654 2657 @command('debugupdatecaches', [])
2655 2658 def debugupdatecaches(ui, repo, *pats, **opts):
2656 2659 """warm all known caches in the repository"""
2657 2660 with repo.wlock(), repo.lock():
2658 2661 repo.updatecaches(full=True)
2659 2662
2660 2663 @command('debugupgraderepo', [
2661 2664 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2662 2665 ('', 'run', False, _('performs an upgrade')),
2663 2666 ])
2664 2667 def debugupgraderepo(ui, repo, run=False, optimize=None):
2665 2668 """upgrade a repository to use different features
2666 2669
2667 2670 If no arguments are specified, the repository is evaluated for upgrade
2668 2671 and a list of problems and potential optimizations is printed.
2669 2672
2670 2673 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2671 2674 can be influenced via additional arguments. More details will be provided
2672 2675 by the command output when run without ``--run``.
2673 2676
2674 2677 During the upgrade, the repository will be locked and no writes will be
2675 2678 allowed.
2676 2679
2677 2680 At the end of the upgrade, the repository may not be readable while new
2678 2681 repository data is swapped in. This window will be as long as it takes to
2679 2682 rename some directories inside the ``.hg`` directory. On most machines, this
2680 2683 should complete almost instantaneously and the chances of a consumer being
2681 2684 unable to access the repository should be low.
2682 2685 """
2683 2686 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2684 2687
2685 2688 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2686 2689 inferrepo=True)
2687 2690 def debugwalk(ui, repo, *pats, **opts):
2688 2691 """show how files match on given patterns"""
2689 2692 opts = pycompat.byteskwargs(opts)
2690 2693 m = scmutil.match(repo[None], pats, opts)
2691 2694 if ui.verbose:
2692 2695 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2693 2696 items = list(repo[None].walk(m))
2694 2697 if not items:
2695 2698 return
2696 2699 f = lambda fn: fn
2697 2700 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2698 2701 f = lambda fn: util.normpath(fn)
2699 2702 fmt = 'f %%-%ds %%-%ds %%s' % (
2700 2703 max([len(abs) for abs in items]),
2701 2704 max([len(m.rel(abs)) for abs in items]))
2702 2705 for abs in items:
2703 2706 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2704 2707 ui.write("%s\n" % line.rstrip())
2705 2708
2706 2709 @command('debugwhyunstable', [], _('REV'))
2707 2710 def debugwhyunstable(ui, repo, rev):
2708 2711 """explain instabilities of a changeset"""
2709 2712 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2710 2713 dnodes = ''
2711 2714 if entry.get('divergentnodes'):
2712 2715 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2713 2716 for ctx in entry['divergentnodes']) + ' '
2714 2717 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2715 2718 entry['reason'], entry['node']))
2716 2719
2717 2720 @command('debugwireargs',
2718 2721 [('', 'three', '', 'three'),
2719 2722 ('', 'four', '', 'four'),
2720 2723 ('', 'five', '', 'five'),
2721 2724 ] + cmdutil.remoteopts,
2722 2725 _('REPO [OPTIONS]... [ONE [TWO]]'),
2723 2726 norepo=True)
2724 2727 def debugwireargs(ui, repopath, *vals, **opts):
2725 2728 opts = pycompat.byteskwargs(opts)
2726 2729 repo = hg.peer(ui, opts, repopath)
2727 2730 for opt in cmdutil.remoteopts:
2728 2731 del opts[opt[1]]
2729 2732 args = {}
2730 2733 for k, v in opts.iteritems():
2731 2734 if v:
2732 2735 args[k] = v
2733 2736 args = pycompat.strkwargs(args)
2734 2737 # run twice to check that we don't mess up the stream for the next command
2735 2738 res1 = repo.debugwireargs(*vals, **args)
2736 2739 res2 = repo.debugwireargs(*vals, **args)
2737 2740 ui.write("%s\n" % res1)
2738 2741 if res1 != res2:
2739 2742 ui.warn("%s\n" % res2)
2740 2743
2741 2744 def _parsewirelangblocks(fh):
2742 2745 activeaction = None
2743 2746 blocklines = []
2744 2747
2745 2748 for line in fh:
2746 2749 line = line.rstrip()
2747 2750 if not line:
2748 2751 continue
2749 2752
2750 2753 if line.startswith(b'#'):
2751 2754 continue
2752 2755
2753 2756 if not line.startswith(b' '):
2754 2757 # New block. Flush previous one.
2755 2758 if activeaction:
2756 2759 yield activeaction, blocklines
2757 2760
2758 2761 activeaction = line
2759 2762 blocklines = []
2760 2763 continue
2761 2764
2762 2765 # Else we start with an indent.
2763 2766
2764 2767 if not activeaction:
2765 2768 raise error.Abort(_('indented line outside of block'))
2766 2769
2767 2770 blocklines.append(line)
2768 2771
2769 2772 # Flush last block.
2770 2773 if activeaction:
2771 2774 yield activeaction, blocklines
2772 2775
2773 2776 @command('debugwireproto',
2774 2777 [
2775 2778 ('', 'localssh', False, _('start an SSH server for this repo')),
2776 2779 ('', 'peer', '', _('construct a specific version of the peer')),
2777 2780 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2778 2781 ('', 'nologhandshake', False,
2779 2782 _('do not log I/O related to the peer handshake')),
2780 2783 ] + cmdutil.remoteopts,
2781 2784 _('[PATH]'),
2782 2785 optionalrepo=True)
2783 2786 def debugwireproto(ui, repo, path=None, **opts):
2784 2787 """send wire protocol commands to a server
2785 2788
2786 2789 This command can be used to issue wire protocol commands to remote
2787 2790 peers and to debug the raw data being exchanged.
2788 2791
2789 2792 ``--localssh`` will start an SSH server against the current repository
2790 2793 and connect to that. By default, the connection will perform a handshake
2791 2794 and establish an appropriate peer instance.
2792 2795
2793 2796 ``--peer`` can be used to bypass the handshake protocol and construct a
2794 2797 peer instance using the specified class type. Valid values are ``raw``,
2795 2798 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2796 2799 raw data payloads and don't support higher-level command actions.
2797 2800
2798 2801 ``--noreadstderr`` can be used to disable automatic reading from stderr
2799 2802 of the peer (for SSH connections only). Disabling automatic reading of
2800 2803 stderr is useful for making output more deterministic.
2801 2804
2802 2805 Commands are issued via a mini language which is specified via stdin.
2803 2806 The language consists of individual actions to perform. An action is
2804 2807 defined by a block. A block is defined as a line with no leading
2805 2808 space followed by 0 or more lines with leading space. Blocks are
2806 2809 effectively a high-level command with additional metadata.
2807 2810
2808 2811 Lines beginning with ``#`` are ignored.
2809 2812
2810 2813 The following sections denote available actions.
2811 2814
2812 2815 raw
2813 2816 ---
2814 2817
2815 2818 Send raw data to the server.
2816 2819
2817 2820 The block payload contains the raw data to send as one atomic send
2818 2821 operation. The data may not actually be delivered in a single system
2819 2822 call: it depends on the abilities of the transport being used.
2820 2823
2821 2824 Each line in the block is de-indented and concatenated. Then, that
2822 2825 value is evaluated as a Python b'' literal. This allows the use of
2823 2826 backslash escaping, etc.
2824 2827
2825 2828 raw+
2826 2829 ----
2827 2830
2828 2831 Behaves like ``raw`` except flushes output afterwards.
2829 2832
2830 2833 command <X>
2831 2834 -----------
2832 2835
2833 2836 Send a request to run a named command, whose name follows the ``command``
2834 2837 string.
2835 2838
2836 2839 Arguments to the command are defined as lines in this block. The format of
2837 2840 each line is ``<key> <value>``. e.g.::
2838 2841
2839 2842 command listkeys
2840 2843 namespace bookmarks
2841 2844
2842 2845 If the value begins with ``eval:``, it will be interpreted as a Python
2843 2846 literal expression. Otherwise values are interpreted as Python b'' literals.
2844 2847 This allows sending complex types and encoding special byte sequences via
2845 2848 backslash escaping.
2846 2849
2847 2850 The following arguments have special meaning:
2848 2851
2849 2852 ``PUSHFILE``
2850 2853 When defined, the *push* mechanism of the peer will be used instead
2851 2854 of the static request-response mechanism and the content of the
2852 2855 file specified in the value of this argument will be sent as the
2853 2856 command payload.
2854 2857
2855 2858 This can be used to submit a local bundle file to the remote.
2856 2859
2857 2860 batchbegin
2858 2861 ----------
2859 2862
2860 2863 Instruct the peer to begin a batched send.
2861 2864
2862 2865 All ``command`` blocks are queued for execution until the next
2863 2866 ``batchsubmit`` block.
2864 2867
2865 2868 batchsubmit
2866 2869 -----------
2867 2870
2868 2871 Submit previously queued ``command`` blocks as a batch request.
2869 2872
2870 2873 This action MUST be paired with a ``batchbegin`` action.
2871 2874
2872 2875 httprequest <method> <path>
2873 2876 ---------------------------
2874 2877
2875 2878 (HTTP peer only)
2876 2879
2877 2880 Send an HTTP request to the peer.
2878 2881
2879 2882 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2880 2883
2881 2884 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2882 2885 headers to add to the request. e.g. ``Accept: foo``.
2883 2886
2884 2887 The following arguments are special:
2885 2888
2886 2889 ``BODYFILE``
2887 2890 The content of the file defined as the value to this argument will be
2888 2891 transferred verbatim as the HTTP request body.
2889 2892
2890 2893 ``frame <type> <flags> <payload>``
2891 2894 Send a unified protocol frame as part of the request body.
2892 2895
2893 2896 All frames will be collected and sent as the body to the HTTP
2894 2897 request.
2895 2898
2896 2899 close
2897 2900 -----
2898 2901
2899 2902 Close the connection to the server.
2900 2903
2901 2904 flush
2902 2905 -----
2903 2906
2904 2907 Flush data written to the server.
2905 2908
2906 2909 readavailable
2907 2910 -------------
2908 2911
2909 2912 Close the write end of the connection and read all available data from
2910 2913 the server.
2911 2914
2912 2915 If the connection to the server encompasses multiple pipes, we poll both
2913 2916 pipes and read available data.
2914 2917
2915 2918 readline
2916 2919 --------
2917 2920
2918 2921 Read a line of output from the server. If there are multiple output
2919 2922 pipes, reads only the main pipe.
2920 2923
2921 2924 ereadline
2922 2925 ---------
2923 2926
2924 2927 Like ``readline``, but read from the stderr pipe, if available.
2925 2928
2926 2929 read <X>
2927 2930 --------
2928 2931
2929 2932 ``read()`` N bytes from the server's main output pipe.
2930 2933
2931 2934 eread <X>
2932 2935 ---------
2933 2936
2934 2937 ``read()`` N bytes from the server's stderr pipe, if available.
2935 2938
2936 2939 Specifying Unified Frame-Based Protocol Frames
2937 2940 ----------------------------------------------
2938 2941
2939 2942     It is possible to emit *Unified Frame-Based Protocol* frames by using
2940 2943     special syntax.
2941 2944
2942 2945     A frame is composed of a type, flags, and a payload. These can be parsed
2943 2946 from a string of the form:
2944 2947
2945 2948 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2946 2949
2947 2950 ``request-id`` and ``stream-id`` are integers defining the request and
2948 2951 stream identifiers.
2949 2952
2950 2953 ``type`` can be an integer value for the frame type or the string name
2951 2954 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2952 2955 ``command-name``.
2953 2956
2954 2957 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2955 2958 components. Each component (and there can be just one) can be an integer
2956 2959 or a flag name for stream flags or frame flags, respectively. Values are
2957 2960 resolved to integers and then bitwise OR'd together.
2958 2961
2959 2962 ``payload`` represents the raw frame payload. If it begins with
2960 2963 ``cbor:``, the following string is evaluated as Python code and the
2961 2964 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2962 2965 as a Python byte string literal.
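
    As a hypothetical illustration, a frame issuing a command request for
    the ``heads`` command might be written as::

      frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}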
2963 2966 """
2964 2967 opts = pycompat.byteskwargs(opts)
2965 2968
2966 2969 if opts['localssh'] and not repo:
2967 2970 raise error.Abort(_('--localssh requires a repository'))
2968 2971
2969 2972 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2970 2973 raise error.Abort(_('invalid value for --peer'),
2971 2974                           hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2972 2975
2973 2976 if path and opts['localssh']:
2974 2977 raise error.Abort(_('cannot specify --localssh with an explicit '
2975 2978 'path'))
2976 2979
2977 2980 if ui.interactive():
2978 2981 ui.write(_('(waiting for commands on stdin)\n'))
2979 2982
2980 2983 blocks = list(_parsewirelangblocks(ui.fin))
2981 2984
2982 2985 proc = None
2983 2986 stdin = None
2984 2987 stdout = None
2985 2988 stderr = None
2986 2989 opener = None
2987 2990
2988 2991 if opts['localssh']:
2989 2992 # We start the SSH server in its own process so there is process
2990 2993 # separation. This prevents a whole class of potential bugs around
2991 2994 # shared state from interfering with server operation.
2992 2995 args = procutil.hgcmd() + [
2993 2996 '-R', repo.root,
2994 2997 'debugserve', '--sshstdio',
2995 2998 ]
2996 2999 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2997 3000 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2998 3001 bufsize=0)
2999 3002
3000 3003 stdin = proc.stdin
3001 3004 stdout = proc.stdout
3002 3005 stderr = proc.stderr
3003 3006
3004 3007 # We turn the pipes into observers so we can log I/O.
3005 3008 if ui.verbose or opts['peer'] == 'raw':
3006 3009 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3007 3010 logdata=True)
3008 3011 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3009 3012 logdata=True)
3010 3013 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3011 3014 logdata=True)
3012 3015
3013 3016 # --localssh also implies the peer connection settings.
3014 3017
3015 3018 url = 'ssh://localserver'
3016 3019 autoreadstderr = not opts['noreadstderr']
3017 3020
3018 3021 if opts['peer'] == 'ssh1':
3019 3022 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3020 3023 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3021 3024 None, autoreadstderr=autoreadstderr)
3022 3025 elif opts['peer'] == 'ssh2':
3023 3026 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3024 3027 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3025 3028 None, autoreadstderr=autoreadstderr)
3026 3029 elif opts['peer'] == 'raw':
3027 3030 ui.write(_('using raw connection to peer\n'))
3028 3031 peer = None
3029 3032 else:
3030 3033 ui.write(_('creating ssh peer from handshake results\n'))
3031 3034 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3032 3035 autoreadstderr=autoreadstderr)
3033 3036
3034 3037 elif path:
3035 3038 # We bypass hg.peer() so we can proxy the sockets.
3036 3039 # TODO consider not doing this because we skip
3037 3040 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3038 3041 u = util.url(path)
3039 3042 if u.scheme != 'http':
3040 3043 raise error.Abort(_('only http:// paths are currently supported'))
3041 3044
3042 3045 url, authinfo = u.authinfo()
3043 3046 openerargs = {
3044 3047 r'useragent': b'Mercurial debugwireproto',
3045 3048 }
3046 3049
3047 3050 # Turn pipes/sockets into observers so we can log I/O.
3048 3051 if ui.verbose:
3049 3052 openerargs.update({
3050 3053 r'loggingfh': ui,
3051 3054 r'loggingname': b's',
3052 3055 r'loggingopts': {
3053 3056 r'logdata': True,
3054 3057 r'logdataapis': False,
3055 3058 },
3056 3059 })
3057 3060
3058 3061 if ui.debugflag:
3059 3062 openerargs[r'loggingopts'][r'logdataapis'] = True
3060 3063
3061 3064 # Don't send default headers when in raw mode. This allows us to
3062 3065 # bypass most of the behavior of our URL handling code so we can
3063 3066 # have near complete control over what's sent on the wire.
3064 3067 if opts['peer'] == 'raw':
3065 3068 openerargs[r'sendaccept'] = False
3066 3069
3067 3070 opener = urlmod.opener(ui, authinfo, **openerargs)
3068 3071
3069 3072 if opts['peer'] == 'http2':
3070 3073 ui.write(_('creating http peer for wire protocol version 2\n'))
3071 3074 # We go through makepeer() because we need an API descriptor for
3072 3075 # the peer instance to be useful.
3073 3076 with ui.configoverride({
3074 3077 ('experimental', 'httppeer.advertise-v2'): True}):
3075 3078 if opts['nologhandshake']:
3076 3079 ui.pushbuffer()
3077 3080
3078 3081 peer = httppeer.makepeer(ui, path, opener=opener)
3079 3082
3080 3083 if opts['nologhandshake']:
3081 3084 ui.popbuffer()
3082 3085
3083 3086 if not isinstance(peer, httppeer.httpv2peer):
3084 3087 raise error.Abort(_('could not instantiate HTTP peer for '
3085 3088 'wire protocol version 2'),
3086 3089 hint=_('the server may not have the feature '
3087 3090 'enabled or is not allowing this '
3088 3091 'client version'))
3089 3092
3090 3093 elif opts['peer'] == 'raw':
3091 3094 ui.write(_('using raw connection to peer\n'))
3092 3095 peer = None
3093 3096 elif opts['peer']:
3094 3097 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3095 3098 opts['peer'])
3096 3099 else:
3097 3100 peer = httppeer.makepeer(ui, path, opener=opener)
3098 3101
3099 3102 # We /could/ populate stdin/stdout with sock.makefile()...
3100 3103 else:
3101 3104 raise error.Abort(_('unsupported connection configuration'))
3102 3105
3103 3106 batchedcommands = None
3104 3107
3105 3108 # Now perform actions based on the parsed wire language instructions.
3106 3109 for action, lines in blocks:
3107 3110 if action in ('raw', 'raw+'):
3108 3111 if not stdin:
3109 3112 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3110 3113
3111 3114 # Concatenate the data together.
3112 3115 data = ''.join(l.lstrip() for l in lines)
3113 3116 data = stringutil.unescapestr(data)
3114 3117 stdin.write(data)
3115 3118
3116 3119 if action == 'raw+':
3117 3120 stdin.flush()
3118 3121 elif action == 'flush':
3119 3122 if not stdin:
3120 3123 raise error.Abort(_('cannot call flush on this peer'))
3121 3124 stdin.flush()
3122 3125 elif action.startswith('command'):
3123 3126 if not peer:
3124 3127 raise error.Abort(_('cannot send commands unless peer instance '
3125 3128 'is available'))
3126 3129
3127 3130 command = action.split(' ', 1)[1]
3128 3131
3129 3132 args = {}
3130 3133 for line in lines:
3131 3134 # We need to allow empty values.
3132 3135 fields = line.lstrip().split(' ', 1)
3133 3136 if len(fields) == 1:
3134 3137 key = fields[0]
3135 3138 value = ''
3136 3139 else:
3137 3140 key, value = fields
3138 3141
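                # Values prefixed with ``eval:`` are parsed as Python
                # literals via stringutil.evalpythonliteral(); all other
                # values are unescaped into byte strings.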
3139 3142 if value.startswith('eval:'):
3140 3143 value = stringutil.evalpythonliteral(value[5:])
3141 3144 else:
3142 3145 value = stringutil.unescapestr(value)
3143 3146
3144 3147 args[key] = value
3145 3148
3146 3149 if batchedcommands is not None:
3147 3150 batchedcommands.append((command, args))
3148 3151 continue
3149 3152
3150 3153 ui.status(_('sending %s command\n') % command)
3151 3154
3152 3155 if 'PUSHFILE' in args:
3153 3156 with open(args['PUSHFILE'], r'rb') as fh:
3154 3157 del args['PUSHFILE']
3155 3158 res, output = peer._callpush(command, fh,
3156 3159 **pycompat.strkwargs(args))
3157 3160 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3158 3161 ui.status(_('remote output: %s\n') %
3159 3162 stringutil.escapestr(output))
3160 3163 else:
3161 3164 with peer.commandexecutor() as e:
3162 3165 res = e.callcommand(command, args).result()
3163 3166
3164 3167 if isinstance(res, wireprotov2peer.commandresponse):
3165 3168 val = list(res.cborobjects())
3166 3169 ui.status(_('response: %s\n') %
3167 3170 stringutil.pprint(val, bprefix=True))
3168 3171
3169 3172 else:
3170 3173 ui.status(_('response: %s\n') %
3171 3174 stringutil.pprint(res, bprefix=True))
3172 3175
3173 3176 elif action == 'batchbegin':
3174 3177 if batchedcommands is not None:
3175 3178 raise error.Abort(_('nested batchbegin not allowed'))
3176 3179
3177 3180 batchedcommands = []
3178 3181 elif action == 'batchsubmit':
3179 3182 # There is a batching API we could go through. But it would be
3180 3183 # difficult to normalize requests into function calls. It is easier
3181 3184 # to bypass this layer and normalize to commands + args.
3182 3185 ui.status(_('sending batch with %d sub-commands\n') %
3183 3186 len(batchedcommands))
3184 3187 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3185 3188 ui.status(_('response #%d: %s\n') %
3186 3189 (i, stringutil.escapestr(chunk)))
3187 3190
3188 3191 batchedcommands = None
3189 3192
3190 3193 elif action.startswith('httprequest '):
3191 3194 if not opener:
3192 3195 raise error.Abort(_('cannot use httprequest without an HTTP '
3193 3196 'peer'))
3194 3197
3195 3198 request = action.split(' ', 2)
3196 3199 if len(request) != 3:
3197 3200 raise error.Abort(_('invalid httprequest: expected format is '
3198 3201 '"httprequest <method> <path>"'))
3199 3202
3200 3203 method, httppath = request[1:]
3201 3204 headers = {}
3202 3205 body = None
3203 3206 frames = []
3204 3207 for line in lines:
3205 3208 line = line.lstrip()
3206 3209 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3207 3210 if m:
3208 3211 headers[m.group(1)] = m.group(2)
3209 3212 continue
3210 3213
3211 3214 if line.startswith(b'BODYFILE '):
3212 3215 with open(line.split(b' ', 1)[1], 'rb') as fh:
3213 3216 body = fh.read()
3214 3217 elif line.startswith(b'frame '):
3215 3218 frame = wireprotoframing.makeframefromhumanstring(
3216 3219 line[len(b'frame '):])
3217 3220
3218 3221 frames.append(frame)
3219 3222 else:
3220 3223 raise error.Abort(_('unknown argument to httprequest: %s') %
3221 3224 line)
3222 3225
3223 3226 url = path + httppath
3224 3227
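            # Frames, when specified, are concatenated and used as the
            # request body, overriding any BODYFILE content.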
3225 3228 if frames:
3226 3229 body = b''.join(bytes(f) for f in frames)
3227 3230
3228 3231 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3229 3232
3230 3233 # urllib.Request insists on using has_data() as a proxy for
3231 3234 # determining the request method. Override that to use our
3232 3235 # explicitly requested method.
3233 3236 req.get_method = lambda: pycompat.sysstr(method)
3234 3237
3235 3238 try:
3236 3239 res = opener.open(req)
3237 3240 body = res.read()
3238 3241 except util.urlerr.urlerror as e:
3239 3242 # read() method must be called, but only exists in Python 2
3240 3243 getattr(e, 'read', lambda: None)()
3241 3244 continue
3242 3245
3243 3246 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3244 3247 ui.write(_('cbor> %s\n') %
3245 3248 stringutil.pprint(cbor.loads(body), bprefix=True))
3246 3249
3247 3250 elif action == 'close':
3248 3251 peer.close()
3249 3252 elif action == 'readavailable':
3250 3253 if not stdout or not stderr:
3251 3254 raise error.Abort(_('readavailable not available on this peer'))
3252 3255
3253 3256 stdin.close()
3254 3257 stdout.read()
3255 3258 stderr.read()
3256 3259
3257 3260 elif action == 'readline':
3258 3261 if not stdout:
3259 3262 raise error.Abort(_('readline not available on this peer'))
3260 3263 stdout.readline()
3261 3264 elif action == 'ereadline':
3262 3265 if not stderr:
3263 3266 raise error.Abort(_('ereadline not available on this peer'))
3264 3267 stderr.readline()
3265 3268 elif action.startswith('read '):
3266 3269 count = int(action.split(' ', 1)[1])
3267 3270 if not stdout:
3268 3271 raise error.Abort(_('read not available on this peer'))
3269 3272 stdout.read(count)
3270 3273 elif action.startswith('eread '):
3271 3274 count = int(action.split(' ', 1)[1])
3272 3275 if not stderr:
3273 3276 raise error.Abort(_('eread not available on this peer'))
3274 3277 stderr.read(count)
3275 3278 else:
3276 3279 raise error.Abort(_('unknown action: %s') % action)
3277 3280
3278 3281 if batchedcommands is not None:
3279 3282 raise error.Abort(_('unclosed "batchbegin" request'))
3280 3283
3281 3284 if peer:
3282 3285 peer.close()
3283 3286
3284 3287 if proc:
3285 3288 proc.kill()
@@ -1,523 +1,583 b''
1 1 $ cat << EOF >> $HGRCPATH
2 2 > [ui]
3 3 > interactive=yes
4 4 > EOF
5 5
6 6 $ hg init debugrevlog
7 7 $ cd debugrevlog
8 8 $ echo a > a
9 9 $ hg ci -Am adda
10 10 adding a
11 11 $ hg rm .
12 12 removing a
13 13 $ hg ci -Am make-it-empty
14 14 $ hg revert --all -r 0
15 15 adding a
16 16 $ hg ci -Am make-it-full
17 17 #if reporevlogstore
18 $ hg debugrevlog -c
19 format : 1
20 flags : inline
21
22 revisions : 3
23 merges : 0 ( 0.00%)
24 normal : 3 (100.00%)
25 revisions : 3
26 empty : 0 ( 0.00%)
27 text : 0 (100.00%)
28 delta : 0 (100.00%)
29 full : 3 (100.00%)
30 deltas : 0 ( 0.00%)
31 revision size : 191
32 full : 191 (100.00%)
33 deltas : 0 ( 0.00%)
34
35 chunks : 3
36 0x75 (u) : 3 (100.00%)
37 chunks size : 191
38 0x75 (u) : 191 (100.00%)
39
40 avg chain length : 0
41 max chain length : 0
42 max chain reach : 67
43 compression ratio : 0
44
45 uncompressed data size (min/max/avg) : 57 / 66 / 62
46 full revision size (min/max/avg) : 58 / 67 / 63
47 delta size (min/max/avg) : 0 / 0 / 0
18 48 $ hg debugrevlog -m
19 49 format : 1
20 50 flags : inline, generaldelta
21 51
22 52 revisions : 3
23 53 merges : 0 ( 0.00%)
24 54 normal : 3 (100.00%)
25 55 revisions : 3
26 56 empty : 1 (33.33%)
27 57 text : 1 (100.00%)
28 58 delta : 0 ( 0.00%)
29 59 full : 2 (66.67%)
30 60 deltas : 0 ( 0.00%)
31 61 revision size : 88
32 62 full : 88 (100.00%)
33 63 deltas : 0 ( 0.00%)
34 64
35 65 chunks : 3
36 66 empty : 1 (33.33%)
37 67 0x75 (u) : 2 (66.67%)
38 68 chunks size : 88
39 69 empty : 0 ( 0.00%)
40 70 0x75 (u) : 88 (100.00%)
41 71
42 72 avg chain length : 0
43 73 max chain length : 0
44 74 max chain reach : 44
45 75 compression ratio : 0
46 76
47 77 uncompressed data size (min/max/avg) : 0 / 43 / 28
48 78 full revision size (min/max/avg) : 44 / 44 / 44
49 79 delta size (min/max/avg) : 0 / 0 / 0
80 $ hg debugrevlog a
81 format : 1
82 flags : inline, generaldelta
83
84 revisions : 1
85 merges : 0 ( 0.00%)
86 normal : 1 (100.00%)
87 revisions : 1
88 empty : 0 ( 0.00%)
89 text : 0 (100.00%)
90 delta : 0 (100.00%)
91 full : 1 (100.00%)
92 deltas : 0 ( 0.00%)
93 revision size : 3
94 full : 3 (100.00%)
95 deltas : 0 ( 0.00%)
96
97 chunks : 1
98 0x75 (u) : 1 (100.00%)
99 chunks size : 3
100 0x75 (u) : 3 (100.00%)
101
102 avg chain length : 0
103 max chain length : 0
104 max chain reach : 3
105 compression ratio : 0
106
107 uncompressed data size (min/max/avg) : 2 / 2 / 2
108 full revision size (min/max/avg) : 3 / 3 / 3
109 delta size (min/max/avg) : 0 / 0 / 0
50 110 #endif
51 111
52 112 Test debugindex, with and without the --verbose/--debug flag
53 113 $ hg debugindex a
54 114 rev linkrev nodeid p1 p2
55 115 0 0 b789fdd96dc2 000000000000 000000000000
56 116
57 117 #if no-reposimplestore
58 118 $ hg --verbose debugindex a
59 119 rev offset length linkrev nodeid p1 p2
60 120 0 0 3 0 b789fdd96dc2 000000000000 000000000000
61 121
62 122 $ hg --debug debugindex a
63 123 rev offset length linkrev nodeid p1 p2
64 124 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
65 125 #endif
66 126
67 127 $ hg debugindex -f 1 a
68 128 rev flag size link p1 p2 nodeid
69 129 0 0000 2 0 -1 -1 b789fdd96dc2
70 130
71 131 #if no-reposimplestore
72 132 $ hg --verbose debugindex -f 1 a
73 133 rev flag offset length size link p1 p2 nodeid
74 134 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
75 135
76 136 $ hg --debug debugindex -f 1 a
77 137 rev flag offset length size link p1 p2 nodeid
78 138 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
79 139 #endif
80 140
81 141 debugdelta chain basic output
82 142
83 143 #if reporevlogstore
84 144 $ hg debugdeltachain -m
85 145 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
86 146 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
87 147 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000
88 148 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000
89 149
90 150 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
91 151 0 1 1
92 152 1 2 1
93 153 2 3 1
94 154
95 155 $ hg debugdeltachain -m -Tjson
96 156 [
97 157 {
98 158 "chainid": 1,
99 159 "chainlen": 1,
100 160 "chainratio": 1.02325581395,
101 161 "chainsize": 44,
102 162 "compsize": 44,
103 163 "deltatype": "base",
104 164 "extradist": 0,
105 165 "extraratio": 0.0,
106 166 "lindist": 44,
107 167 "prevrev": -1,
108 168 "rev": 0,
109 169 "uncompsize": 43
110 170 },
111 171 {
112 172 "chainid": 2,
113 173 "chainlen": 1,
114 174 "chainratio": 0,
115 175 "chainsize": 0,
116 176 "compsize": 0,
117 177 "deltatype": "base",
118 178 "extradist": 0,
119 179 "extraratio": 0,
120 180 "lindist": 0,
121 181 "prevrev": -1,
122 182 "rev": 1,
123 183 "uncompsize": 0
124 184 },
125 185 {
126 186 "chainid": 3,
127 187 "chainlen": 1,
128 188 "chainratio": 1.02325581395,
129 189 "chainsize": 44,
130 190 "compsize": 44,
131 191 "deltatype": "base",
132 192 "extradist": 0,
133 193 "extraratio": 0.0,
134 194 "lindist": 44,
135 195 "prevrev": -1,
136 196 "rev": 2,
137 197 "uncompsize": 43
138 198 }
139 199 ]
140 200
141 201 debugdelta chain with sparse read enabled
142 202
143 203 $ cat >> $HGRCPATH <<EOF
144 204 > [experimental]
145 205 > sparse-read = True
146 206 > EOF
147 207 $ hg debugdeltachain -m
148 208 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
149 209 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
150 210 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
151 211 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
152 212
153 213 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
154 214 0 1 1 44 44 1.0
155 215 1 2 1 0 0 1
156 216 2 3 1 44 44 1.0
157 217
158 218 $ hg debugdeltachain -m -Tjson
159 219 [
160 220 {
161 221 "chainid": 1,
162 222 "chainlen": 1,
163 223 "chainratio": 1.02325581395,
164 224 "chainsize": 44,
165 225 "compsize": 44,
166 226 "deltatype": "base",
167 227 "extradist": 0,
168 228 "extraratio": 0.0,
169 229 "largestblock": 44,
170 230 "lindist": 44,
171 231 "prevrev": -1,
172 232 "readdensity": 1.0,
173 233 "readsize": 44,
174 234 "rev": 0,
175 235 "srchunks": 1,
176 236 "uncompsize": 43
177 237 },
178 238 {
179 239 "chainid": 2,
180 240 "chainlen": 1,
181 241 "chainratio": 0,
182 242 "chainsize": 0,
183 243 "compsize": 0,
184 244 "deltatype": "base",
185 245 "extradist": 0,
186 246 "extraratio": 0,
187 247 "largestblock": 0,
188 248 "lindist": 0,
189 249 "prevrev": -1,
190 250 "readdensity": 1,
191 251 "readsize": 0,
192 252 "rev": 1,
193 253 "srchunks": 1,
194 254 "uncompsize": 0
195 255 },
196 256 {
197 257 "chainid": 3,
198 258 "chainlen": 1,
199 259 "chainratio": 1.02325581395,
200 260 "chainsize": 44,
201 261 "compsize": 44,
202 262 "deltatype": "base",
203 263 "extradist": 0,
204 264 "extraratio": 0.0,
205 265 "largestblock": 44,
206 266 "lindist": 44,
207 267 "prevrev": -1,
208 268 "readdensity": 1.0,
209 269 "readsize": 44,
210 270 "rev": 2,
211 271 "srchunks": 1,
212 272 "uncompsize": 43
213 273 }
214 274 ]
215 275
216 276 $ printf "This test checks things.\n" >> a
217 277 $ hg ci -m a
218 278 $ hg branch other
219 279 marked working directory as branch other
220 280 (branches are permanent and global, did you want a bookmark?)
221 281 $ for i in `$TESTDIR/seq.py 5`; do
222 282 > printf "shorter ${i}" >> a
223 283 > hg ci -m "a other:$i"
224 284 > hg up -q default
225 285 > printf "for the branch default we want longer chains: ${i}" >> a
226 286 > hg ci -m "a default:$i"
227 287 > hg up -q other
228 288 > done
229 289 $ hg debugdeltachain a -T '{rev} {srchunks}\n' \
230 290 > --config experimental.sparse-read.density-threshold=0.50 \
231 291 > --config experimental.sparse-read.min-gap-size=0
232 292 0 1
233 293 1 1
234 294 2 1
235 295 3 1
236 296 4 1
237 297 5 1
238 298 6 1
239 299 7 1
240 300 8 1
241 301 9 1
242 302 10 2
243 303 11 1
244 304 $ hg --config extensions.strip= strip --no-backup -r 1
245 305 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
246 306
247 307 Test max chain len
248 308 $ cat >> $HGRCPATH << EOF
249 309 > [format]
250 310 > maxchainlen=4
251 311 > EOF
252 312
253 313 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
254 314 $ hg ci -m a
255 315 $ printf "b\n" >> a
256 316 $ hg ci -m a
257 317 $ printf "c\n" >> a
258 318 $ hg ci -m a
259 319 $ printf "d\n" >> a
260 320 $ hg ci -m a
261 321 $ printf "e\n" >> a
262 322 $ hg ci -m a
263 323 $ printf "f\n" >> a
264 324 $ hg ci -m a
265 325 $ printf 'g\n' >> a
266 326 $ hg ci -m a
267 327 $ printf 'h\n' >> a
268 328 $ hg ci -m a
269 329
270 330 $ hg debugrevlog -d a
271 331 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
272 332 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
273 333 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
274 334 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
275 335 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
276 336 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
277 337 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
278 338 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
279 339 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
280 340 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
281 341 #endif
282 342
283 343 Test debuglocks command:
284 344
285 345 $ hg debuglocks
286 346 lock: free
287 347 wlock: free
288 348
289 349 * Test setting the lock
290 350
291 351 waitlock <file> will wait for file to be created. If it isn't in a reasonable
292 352 amount of time, displays error message and returns 1
293 353 $ waitlock() {
294 354 > start=`date +%s`
295 355 > timeout=5
296 356 > while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
297 357 > now=`date +%s`
298 358 > if [ "`expr $now - $start`" -gt $timeout ]; then
299 359 > echo "timeout: $1 was not created in $timeout seconds"
300 360 > return 1
301 361 > fi
302 362 > sleep 0.1
303 363 > done
304 364 > }
305 365 $ dolock() {
306 366 > {
307 367 > waitlock .hg/unlock
308 368 > rm -f .hg/unlock
309 369 > echo y
310 370 > } | hg debuglocks "$@" > /dev/null
311 371 > }
312 372 $ dolock -s &
313 373 $ waitlock .hg/store/lock
314 374
315 375 $ hg debuglocks
316 376 lock: user *, process * (*s) (glob)
317 377 wlock: free
318 378 [1]
319 379 $ touch .hg/unlock
320 380 $ wait
321 381 $ [ -f .hg/store/lock ] || echo "There is no lock"
322 382 There is no lock
323 383
324 384 * Test setting the wlock
325 385
326 386 $ dolock -S &
327 387 $ waitlock .hg/wlock
328 388
329 389 $ hg debuglocks
330 390 lock: free
331 391 wlock: user *, process * (*s) (glob)
332 392 [1]
333 393 $ touch .hg/unlock
334 394 $ wait
335 395 $ [ -f .hg/wlock ] || echo "There is no wlock"
336 396 There is no wlock
337 397
338 398 * Test setting both locks
339 399
340 400 $ dolock -Ss &
341 401 $ waitlock .hg/wlock && waitlock .hg/store/lock
342 402
343 403 $ hg debuglocks
344 404 lock: user *, process * (*s) (glob)
345 405 wlock: user *, process * (*s) (glob)
346 406 [2]
347 407
348 408 * Test failing to set a lock
349 409
350 410 $ hg debuglocks -s
351 411 abort: lock is already held
352 412 [255]
353 413
354 414 $ hg debuglocks -S
355 415 abort: wlock is already held
356 416 [255]
357 417
358 418 $ touch .hg/unlock
359 419 $ wait
360 420
361 421 $ hg debuglocks
362 422 lock: free
363 423 wlock: free
364 424
365 425 * Test forcing the lock
366 426
367 427 $ dolock -s &
368 428 $ waitlock .hg/store/lock
369 429
370 430 $ hg debuglocks
371 431 lock: user *, process * (*s) (glob)
372 432 wlock: free
373 433 [1]
374 434
375 435 $ hg debuglocks -L
376 436
377 437 $ hg debuglocks
378 438 lock: free
379 439 wlock: free
380 440
381 441 $ touch .hg/unlock
382 442 $ wait
383 443
384 444 * Test forcing the wlock
385 445
386 446 $ dolock -S &
387 447 $ waitlock .hg/wlock
388 448
389 449 $ hg debuglocks
390 450 lock: free
391 451 wlock: user *, process * (*s) (glob)
392 452 [1]
393 453
394 454 $ hg debuglocks -W
395 455
396 456 $ hg debuglocks
397 457 lock: free
398 458 wlock: free
399 459
400 460 $ touch .hg/unlock
401 461 $ wait
402 462
403 463 Test WdirUnsupported exception
404 464
405 465 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
406 466 abort: working directory revision cannot be specified
407 467 [255]
408 468
409 469 Test cache warming command
410 470
411 471 $ rm -rf .hg/cache/
412 472 $ hg debugupdatecaches --debug
413 473 updating the branch cache
414 474 $ ls -r .hg/cache/*
415 475 .hg/cache/rbc-revs-v1
416 476 .hg/cache/rbc-names-v1
417 477 .hg/cache/manifestfulltextcache
418 478 .hg/cache/branch2-served
419 479
420 480 Test debugcolor
421 481
422 482 #if no-windows
423 483 $ hg debugcolor --style --color always | egrep 'mode|style|log\.'
424 484 color mode: 'ansi'
425 485 available style:
426 486 \x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
427 487 #endif
428 488
429 489 $ hg debugcolor --style --color never
430 490 color mode: None
431 491 available style:
432 492
433 493 $ cd ..
434 494
435 495 Test internal debugstacktrace command
436 496
437 497 $ cat > debugstacktrace.py << EOF
438 498 > from __future__ import absolute_import
439 499 > import sys
440 500 > from mercurial import util
441 501 > def f():
442 502 > util.debugstacktrace(f=sys.stdout)
443 503 > g()
444 504 > def g():
445 505 > util.dst('hello from g\\n', skip=1)
446 506 > h()
447 507 > def h():
448 508 > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
449 509 > f()
450 510 > EOF
451 511 $ $PYTHON debugstacktrace.py
452 512 stacktrace at:
453 513 debugstacktrace.py:12 in * (glob)
454 514 debugstacktrace.py:5 in f
455 515 hello from g at:
456 516 debugstacktrace.py:12 in * (glob)
457 517 debugstacktrace.py:6 in f
458 518 hi ...
459 519 from h hidden in g at:
460 520 debugstacktrace.py:6 in f
461 521 debugstacktrace.py:9 in g
462 522
463 523 Test debugcapabilities command:
464 524
465 525 $ hg debugcapabilities ./debugrevlog/
466 526 Main capabilities:
467 527 branchmap
468 528 $USUAL_BUNDLE2_CAPS$
469 529 getbundle
470 530 known
471 531 lookup
472 532 pushkey
473 533 unbundle
474 534 Bundle2 capabilities:
475 535 HG20
476 536 bookmarks
477 537 changegroup
478 538 01
479 539 02
480 540 digests
481 541 md5
482 542 sha1
483 543 sha512
484 544 error
485 545 abort
486 546 unsupportedcontent
487 547 pushraced
488 548 pushkey
489 549 hgtagsfnodes
490 550 listkeys
491 551 phases
492 552 heads
493 553 pushkey
494 554 remote-changegroup
495 555 http
496 556 https
497 557 rev-branch-cache
498 558 stream
499 559 v2
500 560
501 561 Test debugpeer
502 562
503 563 $ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" debugpeer ssh://user@dummy/debugrevlog
504 564 url: ssh://user@dummy/debugrevlog
505 565 local: no
506 566 pushable: yes
507 567
508 568 $ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" --debug debugpeer ssh://user@dummy/debugrevlog
509 569 running "*" "*/tests/dummyssh" 'user@dummy' 'hg -R debugrevlog serve --stdio' (glob) (no-windows !)
510 570 running "*" "*\tests/dummyssh" "user@dummy" "hg -R debugrevlog serve --stdio" (glob) (windows !)
511 571 devel-peer-request: hello+between
512 572 devel-peer-request: pairs: 81 bytes
513 573 sending hello command
514 574 sending between command
515 575 remote: 413
516 576 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS_SERVER$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
517 577 remote: 1
518 578 devel-peer-request: protocaps
519 579 devel-peer-request: caps: * bytes (glob)
520 580 sending protocaps command
521 581 url: ssh://user@dummy/debugrevlog
522 582 local: no
523 583 pushable: yes