debugcommands: avoid stack trace from debugindexstats in pure mode...
Martin von Zweigbergk
r40401:4f37af86 default
@@ -1,3388 +1,3391 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 dagparser,
42 42 encoding,
43 43 error,
44 44 exchange,
45 45 extensions,
46 46 filemerge,
47 47 filesetlang,
48 48 formatter,
49 49 hg,
50 50 httppeer,
51 51 localrepo,
52 52 lock as lockmod,
53 53 logcmdutil,
54 54 merge as mergemod,
55 55 obsolete,
56 56 obsutil,
57 57 phases,
58 58 policy,
59 59 pvec,
60 60 pycompat,
61 61 registrar,
62 62 repair,
63 63 revlog,
64 64 revset,
65 65 revsetlang,
66 66 scmutil,
67 67 setdiscovery,
68 68 simplemerge,
69 69 sshpeer,
70 70 sslutil,
71 71 streamclone,
72 72 templater,
73 73 treediscovery,
74 74 upgrade,
75 75 url as urlmod,
76 76 util,
77 77 vfs as vfsmod,
78 78 wireprotoframing,
79 79 wireprotoserver,
80 80 wireprotov2peer,
81 81 )
82 82 from .utils import (
83 83 cborutil,
84 84 dateutil,
85 85 procutil,
86 86 stringutil,
87 87 )
88 88
89 89 from .revlogutils import (
90 90 deltas as deltautil
91 91 )
92 92
93 93 release = lockmod.release
94 94
95 95 command = registrar.command()
96 96
97 97 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
98 98 def debugancestor(ui, repo, *args):
99 99 """find the ancestor revision of two revisions in a given index"""
100 100 if len(args) == 3:
101 101 index, rev1, rev2 = args
102 102 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
103 103 lookup = r.lookup
104 104 elif len(args) == 2:
105 105 if not repo:
106 106 raise error.Abort(_('there is no Mercurial repository here '
107 107 '(.hg not found)'))
108 108 rev1, rev2 = args
109 109 r = repo.changelog
110 110 lookup = repo.lookup
111 111 else:
112 112 raise error.Abort(_('either two or three arguments required'))
113 113 a = r.ancestor(lookup(rev1), lookup(rev2))
114 114 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
115 115
116 116 @command('debugapplystreamclonebundle', [], 'FILE')
117 117 def debugapplystreamclonebundle(ui, repo, fname):
118 118 """apply a stream clone bundle file"""
119 119 f = hg.openpath(ui, fname)
120 120 gen = exchange.readbundle(ui, f, fname)
121 121 gen.apply(repo)
122 122
123 123 @command('debugbuilddag',
124 124 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
125 125 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
126 126 ('n', 'new-file', None, _('add new file at each rev'))],
127 127 _('[OPTION]... [TEXT]'))
128 128 def debugbuilddag(ui, repo, text=None,
129 129 mergeable_file=False,
130 130 overwritten_file=False,
131 131 new_file=False):
132 132 """builds a repo with a given DAG from scratch in the current empty repo
133 133
134 134 The description of the DAG is read from stdin if not given on the
135 135 command line.
136 136
137 137 Elements:
138 138
139 139 - "+n" is a linear run of n nodes based on the current default parent
140 140 - "." is a single node based on the current default parent
141 141 - "$" resets the default parent to null (implied at the start);
142 142 otherwise the default parent is always the last node created
143 143 - "<p" sets the default parent to the backref p
144 144 - "*p" is a fork at parent p, which is a backref
145 145 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
146 146 - "/p2" is a merge of the preceding node and p2
147 147 - ":tag" defines a local tag for the preceding node
148 148 - "@branch" sets the named branch for subsequent nodes
149 149 - "#...\\n" is a comment up to the end of the line
150 150
151 151 Whitespace between the above elements is ignored.
152 152
153 153 A backref is either
154 154
155 155 - a number n, which references the node curr-n, where curr is the current
156 156 node, or
157 157 - the name of a local tag you placed earlier using ":tag", or
158 158 - empty to denote the default parent.
159 159
160 160 All string-valued elements are either strictly alphanumeric, or must
161 161 be enclosed in double quotes ("..."), with "\\" as escape character.
162 162 """
163 163
164 164 if text is None:
165 165 ui.status(_("reading DAG from stdin\n"))
166 166 text = ui.fin.read()
167 167
168 168 cl = repo.changelog
169 169 if len(cl) > 0:
170 170 raise error.Abort(_('repository is not empty'))
171 171
172 172 # determine number of revs in DAG
173 173 total = 0
174 174 for type, data in dagparser.parsedag(text):
175 175 if type == 'n':
176 176 total += 1
177 177
178 178 if mergeable_file:
179 179 linesperrev = 2
180 180 # make a file with k lines per rev
181 181 initialmergedlines = ['%d' % i
182 182 for i in pycompat.xrange(0, total * linesperrev)]
183 183 initialmergedlines.append("")
184 184
185 185 tags = []
186 186 progress = ui.makeprogress(_('building'), unit=_('revisions'),
187 187 total=total)
188 188 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
189 189 at = -1
190 190 atbranch = 'default'
191 191 nodeids = []
192 192 id = 0
193 193 progress.update(id)
194 194 for type, data in dagparser.parsedag(text):
195 195 if type == 'n':
196 196 ui.note(('node %s\n' % pycompat.bytestr(data)))
197 197 id, ps = data
198 198
199 199 files = []
200 200 filecontent = {}
201 201
202 202 p2 = None
203 203 if mergeable_file:
204 204 fn = "mf"
205 205 p1 = repo[ps[0]]
206 206 if len(ps) > 1:
207 207 p2 = repo[ps[1]]
208 208 pa = p1.ancestor(p2)
209 209 base, local, other = [x[fn].data() for x in (pa, p1,
210 210 p2)]
211 211 m3 = simplemerge.Merge3Text(base, local, other)
212 212 ml = [l.strip() for l in m3.merge_lines()]
213 213 ml.append("")
214 214 elif at > 0:
215 215 ml = p1[fn].data().split("\n")
216 216 else:
217 217 ml = initialmergedlines
218 218 ml[id * linesperrev] += " r%i" % id
219 219 mergedtext = "\n".join(ml)
220 220 files.append(fn)
221 221 filecontent[fn] = mergedtext
222 222
223 223 if overwritten_file:
224 224 fn = "of"
225 225 files.append(fn)
226 226 filecontent[fn] = "r%i\n" % id
227 227
228 228 if new_file:
229 229 fn = "nf%i" % id
230 230 files.append(fn)
231 231 filecontent[fn] = "r%i\n" % id
232 232 if len(ps) > 1:
233 233 if not p2:
234 234 p2 = repo[ps[1]]
235 235 for fn in p2:
236 236 if fn.startswith("nf"):
237 237 files.append(fn)
238 238 filecontent[fn] = p2[fn].data()
239 239
240 240 def fctxfn(repo, cx, path):
241 241 if path in filecontent:
242 242 return context.memfilectx(repo, cx, path,
243 243 filecontent[path])
244 244 return None
245 245
246 246 if len(ps) == 0 or ps[0] < 0:
247 247 pars = [None, None]
248 248 elif len(ps) == 1:
249 249 pars = [nodeids[ps[0]], None]
250 250 else:
251 251 pars = [nodeids[p] for p in ps]
252 252 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
253 253 date=(id, 0),
254 254 user="debugbuilddag",
255 255 extra={'branch': atbranch})
256 256 nodeid = repo.commitctx(cx)
257 257 nodeids.append(nodeid)
258 258 at = id
259 259 elif type == 'l':
260 260 id, name = data
261 261 ui.note(('tag %s\n' % name))
262 262 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
263 263 elif type == 'a':
264 264 ui.note(('branch %s\n' % data))
265 265 atbranch = data
266 266 progress.update(id)
267 267
268 268 if tags:
269 269 repo.vfs.write("localtags", "".join(tags))
270 270
271 271 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
272 272 indent_string = ' ' * indent
273 273 if all:
274 274 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
275 275 % indent_string)
276 276
277 277 def showchunks(named):
278 278 ui.write("\n%s%s\n" % (indent_string, named))
279 279 for deltadata in gen.deltaiter():
280 280 node, p1, p2, cs, deltabase, delta, flags = deltadata
281 281 ui.write("%s%s %s %s %s %s %d\n" %
282 282 (indent_string, hex(node), hex(p1), hex(p2),
283 283 hex(cs), hex(deltabase), len(delta)))
284 284
285 285 chunkdata = gen.changelogheader()
286 286 showchunks("changelog")
287 287 chunkdata = gen.manifestheader()
288 288 showchunks("manifest")
289 289 for chunkdata in iter(gen.filelogheader, {}):
290 290 fname = chunkdata['filename']
291 291 showchunks(fname)
292 292 else:
293 293 if isinstance(gen, bundle2.unbundle20):
294 294 raise error.Abort(_('use debugbundle2 for this file'))
295 295 chunkdata = gen.changelogheader()
296 296 for deltadata in gen.deltaiter():
297 297 node, p1, p2, cs, deltabase, delta, flags = deltadata
298 298 ui.write("%s%s\n" % (indent_string, hex(node)))
299 299
300 300 def _debugobsmarkers(ui, part, indent=0, **opts):
301 301 """display version and markers contained in 'data'"""
302 302 opts = pycompat.byteskwargs(opts)
303 303 data = part.read()
304 304 indent_string = ' ' * indent
305 305 try:
306 306 version, markers = obsolete._readmarkers(data)
307 307 except error.UnknownVersion as exc:
308 308 msg = "%sunsupported version: %s (%d bytes)\n"
309 309 msg %= indent_string, exc.version, len(data)
310 310 ui.write(msg)
311 311 else:
312 312 msg = "%sversion: %d (%d bytes)\n"
313 313 msg %= indent_string, version, len(data)
314 314 ui.write(msg)
315 315 fm = ui.formatter('debugobsolete', opts)
316 316 for rawmarker in sorted(markers):
317 317 m = obsutil.marker(None, rawmarker)
318 318 fm.startitem()
319 319 fm.plain(indent_string)
320 320 cmdutil.showmarker(fm, m)
321 321 fm.end()
322 322
323 323 def _debugphaseheads(ui, data, indent=0):
324 324 """display version and markers contained in 'data'"""
325 325 indent_string = ' ' * indent
326 326 headsbyphase = phases.binarydecode(data)
327 327 for phase in phases.allphases:
328 328 for head in headsbyphase[phase]:
329 329 ui.write(indent_string)
330 330 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
331 331
332 332 def _quasirepr(thing):
333 333 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
334 334 return '{%s}' % (
335 335 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
336 336 return pycompat.bytestr(repr(thing))
337 337
338 338 def _debugbundle2(ui, gen, all=None, **opts):
339 339 """lists the contents of a bundle2"""
340 340 if not isinstance(gen, bundle2.unbundle20):
341 341 raise error.Abort(_('not a bundle2 file'))
342 342 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
343 343 parttypes = opts.get(r'part_type', [])
344 344 for part in gen.iterparts():
345 345 if parttypes and part.type not in parttypes:
346 346 continue
347 347 msg = '%s -- %s (mandatory: %r)\n'
348 348 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
349 349 if part.type == 'changegroup':
350 350 version = part.params.get('version', '01')
351 351 cg = changegroup.getunbundler(version, part, 'UN')
352 352 if not ui.quiet:
353 353 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
354 354 if part.type == 'obsmarkers':
355 355 if not ui.quiet:
356 356 _debugobsmarkers(ui, part, indent=4, **opts)
357 357 if part.type == 'phase-heads':
358 358 if not ui.quiet:
359 359 _debugphaseheads(ui, part, indent=4)
360 360
361 361 @command('debugbundle',
362 362 [('a', 'all', None, _('show all details')),
363 363 ('', 'part-type', [], _('show only the named part type')),
364 364 ('', 'spec', None, _('print the bundlespec of the bundle'))],
365 365 _('FILE'),
366 366 norepo=True)
367 367 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
368 368 """lists the contents of a bundle"""
369 369 with hg.openpath(ui, bundlepath) as f:
370 370 if spec:
371 371 spec = exchange.getbundlespec(ui, f)
372 372 ui.write('%s\n' % spec)
373 373 return
374 374
375 375 gen = exchange.readbundle(ui, f, bundlepath)
376 376 if isinstance(gen, bundle2.unbundle20):
377 377 return _debugbundle2(ui, gen, all=all, **opts)
378 378 _debugchangegroup(ui, gen, all=all, **opts)
379 379
380 380 @command('debugcapabilities',
381 381 [], _('PATH'),
382 382 norepo=True)
383 383 def debugcapabilities(ui, path, **opts):
384 384 """lists the capabilities of a remote peer"""
385 385 opts = pycompat.byteskwargs(opts)
386 386 peer = hg.peer(ui, opts, path)
387 387 caps = peer.capabilities()
388 388 ui.write(('Main capabilities:\n'))
389 389 for c in sorted(caps):
390 390 ui.write((' %s\n') % c)
391 391 b2caps = bundle2.bundle2caps(peer)
392 392 if b2caps:
393 393 ui.write(('Bundle2 capabilities:\n'))
394 394 for key, values in sorted(b2caps.iteritems()):
395 395 ui.write((' %s\n') % key)
396 396 for v in values:
397 397 ui.write((' %s\n') % v)
398 398
399 399 @command('debugcheckstate', [], '')
400 400 def debugcheckstate(ui, repo):
401 401 """validate the correctness of the current dirstate"""
402 402 parent1, parent2 = repo.dirstate.parents()
403 403 m1 = repo[parent1].manifest()
404 404 m2 = repo[parent2].manifest()
405 405 errors = 0
406 406 for f in repo.dirstate:
407 407 state = repo.dirstate[f]
408 408 if state in "nr" and f not in m1:
409 409 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
410 410 errors += 1
411 411 if state in "a" and f in m1:
412 412 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
413 413 errors += 1
414 414 if state in "m" and f not in m1 and f not in m2:
415 415 ui.warn(_("%s in state %s, but not in either manifest\n") %
416 416 (f, state))
417 417 errors += 1
418 418 for f in m1:
419 419 state = repo.dirstate[f]
420 420 if state not in "nrm":
421 421 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
422 422 errors += 1
423 423 if errors:
424 424 error = _(".hg/dirstate inconsistent with current parent's manifest")
425 425 raise error.Abort(error)
426 426
427 427 @command('debugcolor',
428 428 [('', 'style', None, _('show all configured styles'))],
429 429 'hg debugcolor')
430 430 def debugcolor(ui, repo, **opts):
431 431 """show available color, effects or style"""
432 432 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
433 433 if opts.get(r'style'):
434 434 return _debugdisplaystyle(ui)
435 435 else:
436 436 return _debugdisplaycolor(ui)
437 437
438 438 def _debugdisplaycolor(ui):
439 439 ui = ui.copy()
440 440 ui._styles.clear()
441 441 for effect in color._activeeffects(ui).keys():
442 442 ui._styles[effect] = effect
443 443 if ui._terminfoparams:
444 444 for k, v in ui.configitems('color'):
445 445 if k.startswith('color.'):
446 446 ui._styles[k] = k[6:]
447 447 elif k.startswith('terminfo.'):
448 448 ui._styles[k] = k[9:]
449 449 ui.write(_('available colors:\n'))
450 450 # sort labels with a '_' after the others to group '_background' entries.
451 451 items = sorted(ui._styles.items(),
452 452 key=lambda i: ('_' in i[0], i[0], i[1]))
453 453 for colorname, label in items:
454 454 ui.write(('%s\n') % colorname, label=label)
455 455
456 456 def _debugdisplaystyle(ui):
457 457 ui.write(_('available style:\n'))
458 458 if not ui._styles:
459 459 return
460 460 width = max(len(s) for s in ui._styles)
461 461 for label, effects in sorted(ui._styles.items()):
462 462 ui.write('%s' % label, label=label)
463 463 if effects:
464 464 # 50
465 465 ui.write(': ')
466 466 ui.write(' ' * (max(0, width - len(label))))
467 467 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
468 468 ui.write('\n')
469 469
470 470 @command('debugcreatestreamclonebundle', [], 'FILE')
471 471 def debugcreatestreamclonebundle(ui, repo, fname):
472 472 """create a stream clone bundle file
473 473
474 474 Stream bundles are special bundles that are essentially archives of
475 475 revlog files. They are commonly used for cloning very quickly.
476 476 """
477 477 # TODO we may want to turn this into an abort when this functionality
478 478 # is moved into `hg bundle`.
479 479 if phases.hassecret(repo):
480 480 ui.warn(_('(warning: stream clone bundle will contain secret '
481 481 'revisions)\n'))
482 482
483 483 requirements, gen = streamclone.generatebundlev1(repo)
484 484 changegroup.writechunks(ui, gen, fname)
485 485
486 486 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
487 487
488 488 @command('debugdag',
489 489 [('t', 'tags', None, _('use tags as labels')),
490 490 ('b', 'branches', None, _('annotate with branch names')),
491 491 ('', 'dots', None, _('use dots for runs')),
492 492 ('s', 'spaces', None, _('separate elements by spaces'))],
493 493 _('[OPTION]... [FILE [REV]...]'),
494 494 optionalrepo=True)
495 495 def debugdag(ui, repo, file_=None, *revs, **opts):
496 496 """format the changelog or an index DAG as a concise textual description
497 497
498 498 If you pass a revlog index, the revlog's DAG is emitted. If you list
499 499 revision numbers, they get labeled in the output as rN.
500 500
501 501 Otherwise, the changelog DAG of the current repo is emitted.
502 502 """
503 503 spaces = opts.get(r'spaces')
504 504 dots = opts.get(r'dots')
505 505 if file_:
506 506 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
507 507 file_)
508 508 revs = set((int(r) for r in revs))
509 509 def events():
510 510 for r in rlog:
511 511 yield 'n', (r, list(p for p in rlog.parentrevs(r)
512 512 if p != -1))
513 513 if r in revs:
514 514 yield 'l', (r, "r%i" % r)
515 515 elif repo:
516 516 cl = repo.changelog
517 517 tags = opts.get(r'tags')
518 518 branches = opts.get(r'branches')
519 519 if tags:
520 520 labels = {}
521 521 for l, n in repo.tags().items():
522 522 labels.setdefault(cl.rev(n), []).append(l)
523 523 def events():
524 524 b = "default"
525 525 for r in cl:
526 526 if branches:
527 527 newb = cl.read(cl.node(r))[5]['branch']
528 528 if newb != b:
529 529 yield 'a', newb
530 530 b = newb
531 531 yield 'n', (r, list(p for p in cl.parentrevs(r)
532 532 if p != -1))
533 533 if tags:
534 534 ls = labels.get(r)
535 535 if ls:
536 536 for l in ls:
537 537 yield 'l', (r, l)
538 538 else:
539 539 raise error.Abort(_('need repo for changelog dag'))
540 540
541 541 for line in dagparser.dagtextlines(events(),
542 542 addspaces=spaces,
543 543 wraplabels=True,
544 544 wrapannotations=True,
545 545 wrapnonlinear=dots,
546 546 usedots=dots,
547 547 maxlinewidth=70):
548 548 ui.write(line)
549 549 ui.write("\n")
550 550
551 551 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
552 552 def debugdata(ui, repo, file_, rev=None, **opts):
553 553 """dump the contents of a data file revision"""
554 554 opts = pycompat.byteskwargs(opts)
555 555 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
556 556 if rev is not None:
557 557 raise error.CommandError('debugdata', _('invalid arguments'))
558 558 file_, rev = None, file_
559 559 elif rev is None:
560 560 raise error.CommandError('debugdata', _('invalid arguments'))
561 561 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
562 562 try:
563 563 ui.write(r.revision(r.lookup(rev), raw=True))
564 564 except KeyError:
565 565 raise error.Abort(_('invalid revision identifier %s') % rev)
566 566
567 567 @command('debugdate',
568 568 [('e', 'extended', None, _('try extended date formats'))],
569 569 _('[-e] DATE [RANGE]'),
570 570 norepo=True, optionalrepo=True)
571 571 def debugdate(ui, date, range=None, **opts):
572 572 """parse and display a date"""
573 573 if opts[r"extended"]:
574 574 d = dateutil.parsedate(date, util.extendeddateformats)
575 575 else:
576 576 d = dateutil.parsedate(date)
577 577 ui.write(("internal: %d %d\n") % d)
578 578 ui.write(("standard: %s\n") % dateutil.datestr(d))
579 579 if range:
580 580 m = dateutil.matchdate(range)
581 581 ui.write(("match: %s\n") % m(d[0]))
582 582
583 583 @command('debugdeltachain',
584 584 cmdutil.debugrevlogopts + cmdutil.formatteropts,
585 585 _('-c|-m|FILE'),
586 586 optionalrepo=True)
587 587 def debugdeltachain(ui, repo, file_=None, **opts):
588 588 """dump information about delta chains in a revlog
589 589
590 590 Output can be templatized. Available template keywords are:
591 591
592 592 :``rev``: revision number
593 593 :``chainid``: delta chain identifier (numbered by unique base)
594 594 :``chainlen``: delta chain length to this revision
595 595 :``prevrev``: previous revision in delta chain
596 596 :``deltatype``: role of delta / how it was computed
597 597 :``compsize``: compressed size of revision
598 598 :``uncompsize``: uncompressed size of revision
599 599 :``chainsize``: total size of compressed revisions in chain
600 600 :``chainratio``: total chain size divided by uncompressed revision size
601 601 (new delta chains typically start at ratio 2.00)
602 602 :``lindist``: linear distance from base revision in delta chain to end
603 603 of this revision
604 604 :``extradist``: total size of revisions not part of this delta chain from
605 605 base of delta chain to end of this revision; a measurement
606 606 of how much extra data we need to read/seek across to read
607 607 the delta chain for this revision
608 608 :``extraratio``: extradist divided by chainsize; another representation of
609 609 how much unrelated data is needed to load this delta chain
610 610
611 611 If the repository is configured to use sparse reads, additional keywords
612 612 are available:
613 613
614 614 :``readsize``: total size of data read from the disk for a revision
615 615 (sum of the sizes of all the blocks)
616 616 :``largestblock``: size of the largest block of data read from the disk
617 617 :``readdensity``: density of useful bytes in the data read from the disk
618 618 :``srchunks``: in how many data hunks the whole revision would be read
619 619
620 620 The sparse read can be enabled with experimental.sparse-read = True
621 621 """
622 622 opts = pycompat.byteskwargs(opts)
623 623 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
624 624 index = r.index
625 625 start = r.start
626 626 length = r.length
627 627 generaldelta = r.version & revlog.FLAG_GENERALDELTA
628 628 withsparseread = getattr(r, '_withsparseread', False)
629 629
630 630 def revinfo(rev):
631 631 e = index[rev]
632 632 compsize = e[1]
633 633 uncompsize = e[2]
634 634 chainsize = 0
635 635
636 636 if generaldelta:
637 637 if e[3] == e[5]:
638 638 deltatype = 'p1'
639 639 elif e[3] == e[6]:
640 640 deltatype = 'p2'
641 641 elif e[3] == rev - 1:
642 642 deltatype = 'prev'
643 643 elif e[3] == rev:
644 644 deltatype = 'base'
645 645 else:
646 646 deltatype = 'other'
647 647 else:
648 648 if e[3] == rev:
649 649 deltatype = 'base'
650 650 else:
651 651 deltatype = 'prev'
652 652
653 653 chain = r._deltachain(rev)[0]
654 654 for iterrev in chain:
655 655 e = index[iterrev]
656 656 chainsize += e[1]
657 657
658 658 return compsize, uncompsize, deltatype, chain, chainsize
659 659
660 660 fm = ui.formatter('debugdeltachain', opts)
661 661
662 662 fm.plain(' rev chain# chainlen prev delta '
663 663 'size rawsize chainsize ratio lindist extradist '
664 664 'extraratio')
665 665 if withsparseread:
666 666 fm.plain(' readsize largestblk rddensity srchunks')
667 667 fm.plain('\n')
668 668
669 669 chainbases = {}
670 670 for rev in r:
671 671 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
672 672 chainbase = chain[0]
673 673 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
674 674 basestart = start(chainbase)
675 675 revstart = start(rev)
676 676 lineardist = revstart + comp - basestart
677 677 extradist = lineardist - chainsize
678 678 try:
679 679 prevrev = chain[-2]
680 680 except IndexError:
681 681 prevrev = -1
682 682
683 683 if uncomp != 0:
684 684 chainratio = float(chainsize) / float(uncomp)
685 685 else:
686 686 chainratio = chainsize
687 687
688 688 if chainsize != 0:
689 689 extraratio = float(extradist) / float(chainsize)
690 690 else:
691 691 extraratio = extradist
692 692
693 693 fm.startitem()
694 694 fm.write('rev chainid chainlen prevrev deltatype compsize '
695 695 'uncompsize chainsize chainratio lindist extradist '
696 696 'extraratio',
697 697 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
698 698 rev, chainid, len(chain), prevrev, deltatype, comp,
699 699 uncomp, chainsize, chainratio, lineardist, extradist,
700 700 extraratio,
701 701 rev=rev, chainid=chainid, chainlen=len(chain),
702 702 prevrev=prevrev, deltatype=deltatype, compsize=comp,
703 703 uncompsize=uncomp, chainsize=chainsize,
704 704 chainratio=chainratio, lindist=lineardist,
705 705 extradist=extradist, extraratio=extraratio)
706 706 if withsparseread:
707 707 readsize = 0
708 708 largestblock = 0
709 709 srchunks = 0
710 710
711 711 for revschunk in deltautil.slicechunk(r, chain):
712 712 srchunks += 1
713 713 blkend = start(revschunk[-1]) + length(revschunk[-1])
714 714 blksize = blkend - start(revschunk[0])
715 715
716 716 readsize += blksize
717 717 if largestblock < blksize:
718 718 largestblock = blksize
719 719
720 720 if readsize:
721 721 readdensity = float(chainsize) / float(readsize)
722 722 else:
723 723 readdensity = 1
724 724
725 725 fm.write('readsize largestblock readdensity srchunks',
726 726 ' %10d %10d %9.5f %8d',
727 727 readsize, largestblock, readdensity, srchunks,
728 728 readsize=readsize, largestblock=largestblock,
729 729 readdensity=readdensity, srchunks=srchunks)
730 730
731 731 fm.plain('\n')
732 732
733 733 fm.end()
734 734
735 735 @command('debugdirstate|debugstate',
736 736 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
737 737 ('', 'dates', True, _('display the saved mtime')),
738 738 ('', 'datesort', None, _('sort by saved mtime'))],
739 739 _('[OPTION]...'))
740 740 def debugstate(ui, repo, **opts):
741 741 """show the contents of the current dirstate"""
742 742
743 743 nodates = not opts[r'dates']
744 744 if opts.get(r'nodates') is not None:
745 745 nodates = True
746 746 datesort = opts.get(r'datesort')
747 747
748 748 timestr = ""
749 749 if datesort:
750 750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 751 else:
752 752 keyfunc = None # sort by filename
753 753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 754 if ent[3] == -1:
755 755 timestr = 'unset '
756 756 elif nodates:
757 757 timestr = 'set '
758 758 else:
759 759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 760 time.localtime(ent[3]))
761 761 timestr = encoding.strtolocal(timestr)
762 762 if ent[1] & 0o20000:
763 763 mode = 'lnk'
764 764 else:
765 765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 767 for f in repo.dirstate.copies():
768 768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
770 770 @command('debugdiscovery',
771 771 [('', 'old', None, _('use old-style discovery')),
772 772 ('', 'nonheads', None,
773 773 _('use old-style discovery with non-heads included')),
774 774 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 775 ] + cmdutil.remoteopts,
776 776 _('[--rev REV] [OTHER]'))
777 777 def debugdiscovery(ui, repo, remoteurl="default", **opts):
778 778 """runs the changeset discovery protocol in isolation"""
779 779 opts = pycompat.byteskwargs(opts)
780 780 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
781 781 remote = hg.peer(repo, opts, remoteurl)
782 782 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
783 783
784 784 # make sure tests are repeatable
785 785 random.seed(12323)
786 786
787 787 def doit(pushedrevs, remoteheads, remote=remote):
788 788 if opts.get('old'):
789 789 if not util.safehasattr(remote, 'branches'):
790 790 # enable in-client legacy support
791 791 remote = localrepo.locallegacypeer(remote.local())
792 792 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
793 793 force=True)
794 794 common = set(common)
795 795 if not opts.get('nonheads'):
796 796 ui.write(("unpruned common: %s\n") %
797 797 " ".join(sorted(short(n) for n in common)))
798 798
799 799 clnode = repo.changelog.node
800 800 common = repo.revs('heads(::%ln)', common)
801 801 common = {clnode(r) for r in common}
802 802 else:
803 803 nodes = None
804 804 if pushedrevs:
805 805 revs = scmutil.revrange(repo, pushedrevs)
806 806 nodes = [repo[r].node() for r in revs]
807 807 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
808 808 ancestorsof=nodes)
809 809 common = set(common)
810 810 rheads = set(hds)
811 811 lheads = set(repo.heads())
812 812 ui.write(("common heads: %s\n") %
813 813 " ".join(sorted(short(n) for n in common)))
814 814 if lheads <= common:
815 815 ui.write(("local is subset\n"))
816 816 elif rheads <= common:
817 817 ui.write(("remote is subset\n"))
818 818
819 819 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
820 820 localrevs = opts['rev']
821 821 doit(localrevs, remoterevs)
822 822
823 823 _chunksize = 4 << 10
824 824
825 825 @command('debugdownload',
826 826 [
827 827 ('o', 'output', '', _('path')),
828 828 ],
829 829 optionalrepo=True)
830 830 def debugdownload(ui, repo, url, output=None, **opts):
831 831 """download a resource using Mercurial logic and config
832 832 """
833 833 fh = urlmod.open(ui, url, output)
834 834
835 835 dest = ui
836 836 if output:
837 837 dest = open(output, "wb", _chunksize)
838 838 try:
839 839 data = fh.read(_chunksize)
840 840 while data:
841 841 dest.write(data)
842 842 data = fh.read(_chunksize)
843 843 finally:
844 844 if output:
845 845 dest.close()
846 846
847 847 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
848 848 def debugextensions(ui, repo, **opts):
849 849 '''show information about active extensions'''
850 850 opts = pycompat.byteskwargs(opts)
851 851 exts = extensions.extensions(ui)
852 852 hgver = util.version()
853 853 fm = ui.formatter('debugextensions', opts)
854 854 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
855 855 isinternal = extensions.ismoduleinternal(extmod)
856 856 extsource = pycompat.fsencode(extmod.__file__)
857 857 if isinternal:
858 858 exttestedwith = [] # never expose magic string to users
859 859 else:
860 860 exttestedwith = getattr(extmod, 'testedwith', '').split()
861 861 extbuglink = getattr(extmod, 'buglink', None)
862 862
863 863 fm.startitem()
864 864
865 865 if ui.quiet or ui.verbose:
866 866 fm.write('name', '%s\n', extname)
867 867 else:
868 868 fm.write('name', '%s', extname)
869 869 if isinternal or hgver in exttestedwith:
870 870 fm.plain('\n')
871 871 elif not exttestedwith:
872 872 fm.plain(_(' (untested!)\n'))
873 873 else:
874 874 lasttestedversion = exttestedwith[-1]
875 875 fm.plain(' (%s!)\n' % lasttestedversion)
876 876
877 877 fm.condwrite(ui.verbose and extsource, 'source',
878 878 _(' location: %s\n'), extsource or "")
879 879
880 880 if ui.verbose:
881 881 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
882 882 fm.data(bundled=isinternal)
883 883
884 884 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
885 885 _(' tested with: %s\n'),
886 886 fm.formatlist(exttestedwith, name='ver'))
887 887
888 888 fm.condwrite(ui.verbose and extbuglink, 'buglink',
889 889 _(' bug reporting: %s\n'), extbuglink or "")
890 890
891 891 fm.end()
892 892
893 893 @command('debugfileset',
894 894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
895 895 ('', 'all-files', False,
896 896 _('test files from all revisions and working directory')),
897 897 ('s', 'show-matcher', None,
898 898 _('print internal representation of matcher')),
899 899 ('p', 'show-stage', [],
900 900 _('print parsed tree at the given stage'), _('NAME'))],
901 901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
902 902 def debugfileset(ui, repo, expr, **opts):
903 903 '''parse and apply a fileset specification'''
904 904 from . import fileset
905 905 fileset.symbols # force import of fileset so we have predicates to optimize
906 906 opts = pycompat.byteskwargs(opts)
907 907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908 908
909 909 stages = [
910 910 ('parsed', pycompat.identity),
911 911 ('analyzed', filesetlang.analyze),
912 912 ('optimized', filesetlang.optimize),
913 913 ]
914 914 stagenames = set(n for n, f in stages)
915 915
916 916 showalways = set()
917 917 if ui.verbose and not opts['show_stage']:
918 918 # show parsed tree by --verbose (deprecated)
919 919 showalways.add('parsed')
920 920 if opts['show_stage'] == ['all']:
921 921 showalways.update(stagenames)
922 922 else:
923 923 for n in opts['show_stage']:
924 924 if n not in stagenames:
925 925 raise error.Abort(_('invalid stage name: %s') % n)
926 926 showalways.update(opts['show_stage'])
927 927
928 928 tree = filesetlang.parse(expr)
929 929 for n, f in stages:
930 930 tree = f(tree)
931 931 if n in showalways:
932 932 if opts['show_stage'] or n != 'parsed':
933 933 ui.write(("* %s:\n") % n)
934 934 ui.write(filesetlang.prettyformat(tree), "\n")
935 935
936 936 files = set()
937 937 if opts['all_files']:
938 938 for r in repo:
939 939 c = repo[r]
940 940 files.update(c.files())
941 941 files.update(c.substate)
942 942 if opts['all_files'] or ctx.rev() is None:
943 943 wctx = repo[None]
944 944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 945 subrepos=list(wctx.substate),
946 946 unknown=True, ignored=True))
947 947 files.update(wctx.substate)
948 948 else:
949 949 files.update(ctx.files())
950 950 files.update(ctx.substate)
951 951
952 952 m = ctx.matchfileset(expr)
953 953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 955 for f in sorted(files):
956 956 if not m(f):
957 957 continue
958 958 ui.write("%s\n" % f)
959 959
960 960 @command('debugformat',
961 961 [] + cmdutil.formatteropts)
962 962 def debugformat(ui, repo, **opts):
963 963 """display format information about the current repository
964 964
965 965 Use --verbose to get extra information about current config value and
966 966 Mercurial default."""
967 967 opts = pycompat.byteskwargs(opts)
968 968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970 970
971 971 def makeformatname(name):
972 972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973 973
974 974 fm = ui.formatter('debugformat', opts)
975 975 if fm.isplain():
976 976 def formatvalue(value):
977 977 if util.safehasattr(value, 'startswith'):
978 978 return value
979 979 if value:
980 980 return 'yes'
981 981 else:
982 982 return 'no'
983 983 else:
984 984 formatvalue = pycompat.identity
985 985
986 986 fm.plain('format-variant')
987 987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 988 fm.plain(' repo')
989 989 if ui.verbose:
990 990 fm.plain(' config default')
991 991 fm.plain('\n')
992 992 for fv in upgrade.allformatvariant:
993 993 fm.startitem()
994 994 repovalue = fv.fromrepo(repo)
995 995 configvalue = fv.fromconfig(repo)
996 996
997 997 if repovalue != configvalue:
998 998 namelabel = 'formatvariant.name.mismatchconfig'
999 999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 1000 elif repovalue != fv.default:
1001 1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 1003 else:
1004 1004 namelabel = 'formatvariant.name.uptodate'
1005 1005 repolabel = 'formatvariant.repo.uptodate'
1006 1006
1007 1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 1008 label=namelabel)
1009 1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 1010 label=repolabel)
1011 1011 if fv.default != configvalue:
1012 1012 configlabel = 'formatvariant.config.special'
1013 1013 else:
1014 1014 configlabel = 'formatvariant.config.default'
1015 1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 1016 label=configlabel)
1017 1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 1018 label='formatvariant.default')
1019 1019 fm.plain('\n')
1020 1020 fm.end()
1021 1021
1022 1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 1023 def debugfsinfo(ui, path="."):
1024 1024 """show information detected about current filesystem"""
1025 1025 ui.write(('path: %s\n') % path)
1026 1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 1031 casesensitive = '(unknown)'
1032 1032 try:
1033 1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 1035 except OSError:
1036 1036 pass
1037 1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
1039 1039 @command('debuggetbundle',
1040 1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 1043 _('REPO FILE [-H|-C ID]...'),
1044 1044 norepo=True)
1045 1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 1046 """retrieves a bundle from a repo
1047 1047
1048 1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 1049 given file.
1050 1050 """
1051 1051 opts = pycompat.byteskwargs(opts)
1052 1052 repo = hg.peer(ui, opts, repopath)
1053 1053 if not repo.capable('getbundle'):
1054 1054 raise error.Abort("getbundle() not supported by target repository")
1055 1055 args = {}
1056 1056 if common:
1057 1057 args[r'common'] = [bin(s) for s in common]
1058 1058 if head:
1059 1059 args[r'heads'] = [bin(s) for s in head]
1060 1060 # TODO: get desired bundlecaps from command line.
1061 1061 args[r'bundlecaps'] = None
1062 1062 bundle = repo.getbundle('debug', **args)
1063 1063
1064 1064 bundletype = opts.get('type', 'bzip2').lower()
1065 1065 btypes = {'none': 'HG10UN',
1066 1066 'bzip2': 'HG10BZ',
1067 1067 'gzip': 'HG10GZ',
1068 1068 'bundle2': 'HG20'}
1069 1069 bundletype = btypes.get(bundletype)
1070 1070 if bundletype not in bundle2.bundletypes:
1071 1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
1074 1074 @command('debugignore', [], '[FILE]')
1075 1075 def debugignore(ui, repo, *files, **opts):
1076 1076 """display the combined ignore pattern and information about ignored files
1077 1077
1078 1078 With no argument display the combined ignore pattern.
1079 1079
1080 1080 Given space separated file names, shows if the given file is ignored and
1081 1081 if so, show the ignore rule (file and line number) that matched it.
1082 1082 """
1083 1083 ignore = repo.dirstate._ignore
1084 1084 if not files:
1085 1085 # Show all the patterns
1086 1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 1087 else:
1088 1088 m = scmutil.match(repo[None], pats=files)
1089 1089 for f in m.files():
1090 1090 nf = util.normpath(f)
1091 1091 ignored = None
1092 1092 ignoredata = None
1093 1093 if nf != '.':
1094 1094 if ignore(nf):
1095 1095 ignored = nf
1096 1096 ignoredata = repo.dirstate._ignorefileandline(nf)
1097 1097 else:
1098 1098 for p in util.finddirs(nf):
1099 1099 if ignore(p):
1100 1100 ignored = p
1101 1101 ignoredata = repo.dirstate._ignorefileandline(p)
1102 1102 break
1103 1103 if ignored:
1104 1104 if ignored == nf:
1105 1105 ui.write(_("%s is ignored\n") % m.uipath(f))
1106 1106 else:
1107 1107 ui.write(_("%s is ignored because of "
1108 1108 "containing folder %s\n")
1109 1109 % (m.uipath(f), ignored))
1110 1110 ignorefile, lineno, line = ignoredata
1111 1111 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1112 1112 % (ignorefile, lineno, line))
1113 1113 else:
1114 1114 ui.write(_("%s is not ignored\n") % m.uipath(f))
1115 1115
1116 1116 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1117 1117 _('-c|-m|FILE'))
1118 1118 def debugindex(ui, repo, file_=None, **opts):
1119 1119 """dump index data for a storage primitive"""
1120 1120 opts = pycompat.byteskwargs(opts)
1121 1121 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1122 1122
1123 1123 if ui.debugflag:
1124 1124 shortfn = hex
1125 1125 else:
1126 1126 shortfn = short
1127 1127
1128 1128 idlen = 12
1129 1129 for i in store:
1130 1130 idlen = len(shortfn(store.node(i)))
1131 1131 break
1132 1132
1133 1133 fm = ui.formatter('debugindex', opts)
1134 1134 fm.plain(b' rev linkrev %s %s p2\n' % (
1135 1135 b'nodeid'.ljust(idlen),
1136 1136 b'p1'.ljust(idlen)))
1137 1137
1138 1138 for rev in store:
1139 1139 node = store.node(rev)
1140 1140 parents = store.parents(node)
1141 1141
1142 1142 fm.startitem()
1143 1143 fm.write(b'rev', b'%6d ', rev)
1144 1144 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1145 1145 fm.write(b'node', '%s ', shortfn(node))
1146 1146 fm.write(b'p1', '%s ', shortfn(parents[0]))
1147 1147 fm.write(b'p2', '%s', shortfn(parents[1]))
1148 1148 fm.plain(b'\n')
1149 1149
1150 1150 fm.end()
1151 1151
1152 1152 @command('debugindexdot', cmdutil.debugrevlogopts,
1153 1153 _('-c|-m|FILE'), optionalrepo=True)
1154 1154 def debugindexdot(ui, repo, file_=None, **opts):
1155 1155 """dump an index DAG as a graphviz dot file"""
1156 1156 opts = pycompat.byteskwargs(opts)
1157 1157 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1158 1158 ui.write(("digraph G {\n"))
1159 1159 for i in r:
1160 1160 node = r.node(i)
1161 1161 pp = r.parents(node)
1162 1162 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1163 1163 if pp[1] != nullid:
1164 1164 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1165 1165 ui.write("}\n")
1166 1166
1167 1167 @command('debugindexstats', [])
1168 1168 def debugindexstats(ui, repo):
1169 1169 """show stats related to the changelog index"""
1170 1170 repo.changelog.shortest(nullid, 1)
1171 for k, v in sorted(repo.changelog.index.stats().items()):
1171 index = repo.changelog.index
1172 if not util.safehasattr(index, 'stats'):
1173 raise error.Abort(_('debugindexstats only works with native code'))
1174 for k, v in sorted(index.stats().items()):
1172 1175 ui.write('%s: %s\n' % (k, v))
1173 1176
1174 1177 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1175 1178 def debuginstall(ui, **opts):
1176 1179 '''test Mercurial installation
1177 1180
1178 1181 Returns 0 on success.
1179 1182 '''
1180 1183 opts = pycompat.byteskwargs(opts)
1181 1184
1182 1185 def writetemp(contents):
1183 1186 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1184 1187 f = os.fdopen(fd, r"wb")
1185 1188 f.write(contents)
1186 1189 f.close()
1187 1190 return name
1188 1191
1189 1192 problems = 0
1190 1193
1191 1194 fm = ui.formatter('debuginstall', opts)
1192 1195 fm.startitem()
1193 1196
1194 1197 # encoding
1195 1198 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1196 1199 err = None
1197 1200 try:
1198 1201 codecs.lookup(pycompat.sysstr(encoding.encoding))
1199 1202 except LookupError as inst:
1200 1203 err = stringutil.forcebytestr(inst)
1201 1204 problems += 1
1202 1205 fm.condwrite(err, 'encodingerror', _(" %s\n"
1203 1206 " (check that your locale is properly set)\n"), err)
1204 1207
1205 1208 # Python
1206 1209 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1207 1210 pycompat.sysexecutable)
1208 1211 fm.write('pythonver', _("checking Python version (%s)\n"),
1209 1212 ("%d.%d.%d" % sys.version_info[:3]))
1210 1213 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1211 1214 os.path.dirname(pycompat.fsencode(os.__file__)))
1212 1215
1213 1216 security = set(sslutil.supportedprotocols)
1214 1217 if sslutil.hassni:
1215 1218 security.add('sni')
1216 1219
1217 1220 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1218 1221 fm.formatlist(sorted(security), name='protocol',
1219 1222 fmt='%s', sep=','))
1220 1223
1221 1224 # These are warnings, not errors. So don't increment problem count. This
1222 1225 # may change in the future.
1223 1226 if 'tls1.2' not in security:
1224 1227 fm.plain(_(' TLS 1.2 not supported by Python install; '
1225 1228 'network connections lack modern security\n'))
1226 1229 if 'sni' not in security:
1227 1230 fm.plain(_(' SNI not supported by Python install; may have '
1228 1231 'connectivity issues with some servers\n'))
1229 1232
1230 1233 # TODO print CA cert info
1231 1234
1232 1235 # hg version
1233 1236 hgver = util.version()
1234 1237 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1235 1238 hgver.split('+')[0])
1236 1239 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1237 1240 '+'.join(hgver.split('+')[1:]))
1238 1241
1239 1242 # compiled modules
1240 1243 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1241 1244 policy.policy)
1242 1245 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1243 1246 os.path.dirname(pycompat.fsencode(__file__)))
1244 1247
1245 1248 if policy.policy in ('c', 'allow'):
1246 1249 err = None
1247 1250 try:
1248 1251 from .cext import (
1249 1252 base85,
1250 1253 bdiff,
1251 1254 mpatch,
1252 1255 osutil,
1253 1256 )
1254 1257 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1255 1258 except Exception as inst:
1256 1259 err = stringutil.forcebytestr(inst)
1257 1260 problems += 1
1258 1261 fm.condwrite(err, 'extensionserror', " %s\n", err)
1259 1262
1260 1263 compengines = util.compengines._engines.values()
1261 1264 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1262 1265 fm.formatlist(sorted(e.name() for e in compengines),
1263 1266 name='compengine', fmt='%s', sep=', '))
1264 1267 fm.write('compenginesavail', _('checking available compression engines '
1265 1268 '(%s)\n'),
1266 1269 fm.formatlist(sorted(e.name() for e in compengines
1267 1270 if e.available()),
1268 1271 name='compengine', fmt='%s', sep=', '))
1269 1272 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1270 1273 fm.write('compenginesserver', _('checking available compression engines '
1271 1274 'for wire protocol (%s)\n'),
1272 1275 fm.formatlist([e.name() for e in wirecompengines
1273 1276 if e.wireprotosupport()],
1274 1277 name='compengine', fmt='%s', sep=', '))
1275 1278 re2 = 'missing'
1276 1279 if util._re2:
1277 1280 re2 = 'available'
1278 1281 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1279 1282 fm.data(re2=bool(util._re2))
1280 1283
1281 1284 # templates
1282 1285 p = templater.templatepaths()
1283 1286 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1284 1287 fm.condwrite(not p, '', _(" no template directories found\n"))
1285 1288 if p:
1286 1289 m = templater.templatepath("map-cmdline.default")
1287 1290 if m:
1288 1291 # template found, check if it is working
1289 1292 err = None
1290 1293 try:
1291 1294 templater.templater.frommapfile(m)
1292 1295 except Exception as inst:
1293 1296 err = stringutil.forcebytestr(inst)
1294 1297 p = None
1295 1298 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1296 1299 else:
1297 1300 p = None
1298 1301 fm.condwrite(p, 'defaulttemplate',
1299 1302 _("checking default template (%s)\n"), m)
1300 1303 fm.condwrite(not m, 'defaulttemplatenotfound',
1301 1304 _(" template '%s' not found\n"), "default")
1302 1305 if not p:
1303 1306 problems += 1
1304 1307 fm.condwrite(not p, '',
1305 1308 _(" (templates seem to have been installed incorrectly)\n"))
1306 1309
1307 1310 # editor
1308 1311 editor = ui.geteditor()
1309 1312 editor = util.expandpath(editor)
1310 1313 editorbin = procutil.shellsplit(editor)[0]
1311 1314 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1312 1315 cmdpath = procutil.findexe(editorbin)
1313 1316 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1314 1317 _(" No commit editor set and can't find %s in PATH\n"
1315 1318 " (specify a commit editor in your configuration"
1316 1319 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1317 1320 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1318 1321 _(" Can't find editor '%s' in PATH\n"
1319 1322 " (specify a commit editor in your configuration"
1320 1323 " file)\n"), not cmdpath and editorbin)
1321 1324 if not cmdpath and editor != 'vi':
1322 1325 problems += 1
1323 1326
1324 1327 # check username
1325 1328 username = None
1326 1329 err = None
1327 1330 try:
1328 1331 username = ui.username()
1329 1332 except error.Abort as e:
1330 1333 err = stringutil.forcebytestr(e)
1331 1334 problems += 1
1332 1335
1333 1336 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1334 1337 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1335 1338 " (specify a username in your configuration file)\n"), err)
1336 1339
1337 1340 fm.condwrite(not problems, '',
1338 1341 _("no problems detected\n"))
1339 1342 if not problems:
1340 1343 fm.data(problems=problems)
1341 1344 fm.condwrite(problems, 'problems',
1342 1345 _("%d problems detected,"
1343 1346 " please check your install!\n"), problems)
1344 1347 fm.end()
1345 1348
1346 1349 return problems
1347 1350
1348 1351 @command('debugknown', [], _('REPO ID...'), norepo=True)
1349 1352 def debugknown(ui, repopath, *ids, **opts):
1350 1353 """test whether node ids are known to a repo
1351 1354
1352 1355 Every ID must be a full-length hex node id string. Returns a list of 0s
1353 1356 and 1s indicating unknown/known.
1354 1357 """
1355 1358 opts = pycompat.byteskwargs(opts)
1356 1359 repo = hg.peer(ui, opts, repopath)
1357 1360 if not repo.capable('known'):
1358 1361 raise error.Abort("known() not supported by target repository")
1359 1362 flags = repo.known([bin(s) for s in ids])
1360 1363 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1361 1364
1362 1365 @command('debuglabelcomplete', [], _('LABEL...'))
1363 1366 def debuglabelcomplete(ui, repo, *args):
1364 1367 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1365 1368 debugnamecomplete(ui, repo, *args)
1366 1369
1367 1370 @command('debuglocks',
1368 1371 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1369 1372 ('W', 'force-wlock', None,
1370 1373 _('free the working state lock (DANGEROUS)')),
1371 1374 ('s', 'set-lock', None, _('set the store lock until stopped')),
1372 1375 ('S', 'set-wlock', None,
1373 1376 _('set the working state lock until stopped'))],
1374 1377 _('[OPTION]...'))
1375 1378 def debuglocks(ui, repo, **opts):
1376 1379 """show or modify state of locks
1377 1380
1378 1381 By default, this command will show which locks are held. This
1379 1382 includes the user and process holding the lock, the amount of time
1380 1383 the lock has been held, and the machine name where the process is
1381 1384 running if it's not local.
1382 1385
1383 1386 Locks protect the integrity of Mercurial's data, so should be
1384 1387 treated with care. System crashes or other interruptions may cause
1385 1388 locks to not be properly released, though Mercurial will usually
1386 1389 detect and remove such stale locks automatically.
1387 1390
1388 1391 However, detecting stale locks may not always be possible (for
1389 1392 instance, on a shared filesystem). Removing locks may also be
1390 1393 blocked by filesystem permissions.
1391 1394
1392 1395 Setting a lock will prevent other commands from changing the data.
1393 1396 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1394 1397 The set locks are removed when the command exits.
1395 1398
1396 1399 Returns 0 if no locks are held.
1397 1400
1398 1401 """
1399 1402
1400 1403 if opts.get(r'force_lock'):
1401 1404 repo.svfs.unlink('lock')
1402 1405 if opts.get(r'force_wlock'):
1403 1406 repo.vfs.unlink('wlock')
1404 1407 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1405 1408 return 0
1406 1409
1407 1410 locks = []
1408 1411 try:
1409 1412 if opts.get(r'set_wlock'):
1410 1413 try:
1411 1414 locks.append(repo.wlock(False))
1412 1415 except error.LockHeld:
1413 1416 raise error.Abort(_('wlock is already held'))
1414 1417 if opts.get(r'set_lock'):
1415 1418 try:
1416 1419 locks.append(repo.lock(False))
1417 1420 except error.LockHeld:
1418 1421 raise error.Abort(_('lock is already held'))
1419 1422 if len(locks):
1420 1423 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1421 1424 return 0
1422 1425 finally:
1423 1426 release(*locks)
1424 1427
1425 1428 now = time.time()
1426 1429 held = 0
1427 1430
1428 1431 def report(vfs, name, method):
1429 1432 # this causes stale locks to get reaped for more accurate reporting
1430 1433 try:
1431 1434 l = method(False)
1432 1435 except error.LockHeld:
1433 1436 l = None
1434 1437
1435 1438 if l:
1436 1439 l.release()
1437 1440 else:
1438 1441 try:
1439 1442 st = vfs.lstat(name)
1440 1443 age = now - st[stat.ST_MTIME]
1441 1444 user = util.username(st.st_uid)
1442 1445 locker = vfs.readlock(name)
1443 1446 if ":" in locker:
1444 1447 host, pid = locker.split(':')
1445 1448 if host == socket.gethostname():
1446 1449 locker = 'user %s, process %s' % (user or b'None', pid)
1447 1450 else:
1448 1451 locker = 'user %s, process %s, host %s' \
1449 1452 % (user or b'None', pid, host)
1450 1453 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1451 1454 return 1
1452 1455 except OSError as e:
1453 1456 if e.errno != errno.ENOENT:
1454 1457 raise
1455 1458
1456 1459 ui.write(("%-6s free\n") % (name + ":"))
1457 1460 return 0
1458 1461
1459 1462 held += report(repo.svfs, "lock", repo.lock)
1460 1463 held += report(repo.vfs, "wlock", repo.wlock)
1461 1464
1462 1465 return held
1463 1466
1464 1467 @command('debugmanifestfulltextcache', [
1465 1468 ('', 'clear', False, _('clear the cache')),
1466 1469 ('a', 'add', '', _('add the given manifest node to the cache'),
1467 1470 _('NODE'))
1468 1471 ], '')
1469 1472 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1470 1473 """show, clear or amend the contents of the manifest fulltext cache"""
1471 1474 with repo.lock():
1472 1475 r = repo.manifestlog.getstorage(b'')
1473 1476 try:
1474 1477 cache = r._fulltextcache
1475 1478 except AttributeError:
1476 1479 ui.warn(_(
1477 1480 "Current revlog implementation doesn't appear to have a "
1478 1481 'manifest fulltext cache\n'))
1479 1482 return
1480 1483
1481 1484 if opts.get(r'clear'):
1482 1485 cache.clear()
1483 1486
1484 1487 if add:
1485 1488 try:
1486 1489 manifest = repo.manifestlog[r.lookup(add)]
1487 1490 except error.LookupError as e:
1488 1491 raise error.Abort(e, hint="Check your manifest node id")
1489 1492 manifest.read() # stores revision in cache too
1490 1493
1491 1494 if not len(cache):
1492 1495 ui.write(_('Cache empty'))
1493 1496 else:
1494 1497 ui.write(
1495 1498 _('Cache contains %d manifest entries, in order of most to '
1496 1499 'least recent:\n') % (len(cache),))
1497 1500 totalsize = 0
1498 1501 for nodeid in cache:
1499 1502 # Use cache.get to not update the LRU order
1500 1503 data = cache.get(nodeid)
1501 1504 size = len(data)
1502 1505 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1503 1506 ui.write(_('id: %s, size %s\n') % (
1504 1507 hex(nodeid), util.bytecount(size)))
1505 1508 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1506 1509 ui.write(
1507 1510 _('Total cache data size %s, on-disk %s\n') % (
1508 1511 util.bytecount(totalsize), util.bytecount(ondisk))
1509 1512 )
1510 1513
1511 1514 @command('debugmergestate', [], '')
1512 1515 def debugmergestate(ui, repo, *args):
1513 1516 """print merge state
1514 1517
1515 1518 Use --verbose to print out information about whether v1 or v2 merge state
1516 1519 was chosen."""
1517 1520 def _hashornull(h):
1518 1521 if h == nullhex:
1519 1522 return 'null'
1520 1523 else:
1521 1524 return h
1522 1525
1523 1526 def printrecords(version):
1524 1527 ui.write(('* version %d records\n') % version)
1525 1528 if version == 1:
1526 1529 records = v1records
1527 1530 else:
1528 1531 records = v2records
1529 1532
1530 1533 for rtype, record in records:
1531 1534 # pretty print some record types
1532 1535 if rtype == 'L':
1533 1536 ui.write(('local: %s\n') % record)
1534 1537 elif rtype == 'O':
1535 1538 ui.write(('other: %s\n') % record)
1536 1539 elif rtype == 'm':
1537 1540 driver, mdstate = record.split('\0', 1)
1538 1541 ui.write(('merge driver: %s (state "%s")\n')
1539 1542 % (driver, mdstate))
1540 1543 elif rtype in 'FDC':
1541 1544 r = record.split('\0')
1542 1545 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1543 1546 if version == 1:
1544 1547 onode = 'not stored in v1 format'
1545 1548 flags = r[7]
1546 1549 else:
1547 1550 onode, flags = r[7:9]
1548 1551 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1549 1552 % (f, rtype, state, _hashornull(hash)))
1550 1553 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1551 1554 ui.write((' ancestor path: %s (node %s)\n')
1552 1555 % (afile, _hashornull(anode)))
1553 1556 ui.write((' other path: %s (node %s)\n')
1554 1557 % (ofile, _hashornull(onode)))
1555 1558 elif rtype == 'f':
1556 1559 filename, rawextras = record.split('\0', 1)
1557 1560 extras = rawextras.split('\0')
1558 1561 i = 0
1559 1562 extrastrings = []
1560 1563 while i < len(extras):
1561 1564 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1562 1565 i += 2
1563 1566
1564 1567 ui.write(('file extras: %s (%s)\n')
1565 1568 % (filename, ', '.join(extrastrings)))
1566 1569 elif rtype == 'l':
1567 1570 labels = record.split('\0', 2)
1568 1571 labels = [l for l in labels if len(l) > 0]
1569 1572 ui.write(('labels:\n'))
1570 1573 ui.write((' local: %s\n' % labels[0]))
1571 1574 ui.write((' other: %s\n' % labels[1]))
1572 1575 if len(labels) > 2:
1573 1576 ui.write((' base: %s\n' % labels[2]))
1574 1577 else:
1575 1578 ui.write(('unrecognized entry: %s\t%s\n')
1576 1579 % (rtype, record.replace('\0', '\t')))
1577 1580
1578 1581 # Avoid mergestate.read() since it may raise an exception for unsupported
1579 1582 # merge state records. We shouldn't be doing this, but this is OK since this
1580 1583 # command is pretty low-level.
1581 1584 ms = mergemod.mergestate(repo)
1582 1585
1583 1586 # sort so that reasonable information is on top
1584 1587 v1records = ms._readrecordsv1()
1585 1588 v2records = ms._readrecordsv2()
1586 1589 order = 'LOml'
1587 1590 def key(r):
1588 1591 idx = order.find(r[0])
1589 1592 if idx == -1:
1590 1593 return (1, r[1])
1591 1594 else:
1592 1595 return (0, idx)
1593 1596 v1records.sort(key=key)
1594 1597 v2records.sort(key=key)
1595 1598
1596 1599 if not v1records and not v2records:
1597 1600 ui.write(('no merge state found\n'))
1598 1601 elif not v2records:
1599 1602 ui.note(('no version 2 merge state\n'))
1600 1603 printrecords(1)
1601 1604 elif ms._v1v2match(v1records, v2records):
1602 1605 ui.note(('v1 and v2 states match: using v2\n'))
1603 1606 printrecords(2)
1604 1607 else:
1605 1608 ui.note(('v1 and v2 states mismatch: using v1\n'))
1606 1609 printrecords(1)
1607 1610 if ui.verbose:
1608 1611 printrecords(2)
1609 1612
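# Minimal sketch of the record ordering used by debugmergestate above: record
# types found in 'LOml' sort first, in that order, and any other record type
# falls back to sorting by its payload. Hypothetical helper for illustration.
def _examplerecordkey(record, order='LOml'):
    rtype, payload = record
    idx = order.find(rtype)
    return (1, payload) if idx == -1 else (0, idx)
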
1610 1613 @command('debugnamecomplete', [], _('NAME...'))
1611 1614 def debugnamecomplete(ui, repo, *args):
1612 1615 '''complete "names" - tags, open branch names, bookmark names'''
1613 1616
1614 1617 names = set()
1615 1618 # since we previously only listed open branches, we will handle that
1616 1619 # specially (after this for loop)
1617 1620 for name, ns in repo.names.iteritems():
1618 1621 if name != 'branches':
1619 1622 names.update(ns.listnames(repo))
1620 1623 names.update(tag for (tag, heads, tip, closed)
1621 1624 in repo.branchmap().iterbranches() if not closed)
1622 1625 completions = set()
1623 1626 if not args:
1624 1627 args = ['']
1625 1628 for a in args:
1626 1629 completions.update(n for n in names if n.startswith(a))
1627 1630 ui.write('\n'.join(sorted(completions)))
1628 1631 ui.write('\n')
1629 1632
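# Sketch of the prefix matching performed by debugnamecomplete above: every
# known name starting with one of the given arguments is offered as a
# completion. Hypothetical helper, shown only for illustration.
def _examplecomplete(names, prefixes):
    completions = set()
    for prefix in prefixes or ['']:
        completions.update(n for n in names if n.startswith(prefix))
    return sorted(completions)
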
1630 1633 @command('debugobsolete',
1631 1634 [('', 'flags', 0, _('markers flag')),
1632 1635 ('', 'record-parents', False,
1633 1636 _('record parent information for the precursor')),
1634 1637 ('r', 'rev', [], _('display markers relevant to REV')),
1635 1638 ('', 'exclusive', False, _('restrict display to markers only '
1636 1639 'relevant to REV')),
1637 1640 ('', 'index', False, _('display index of the marker')),
1638 1641 ('', 'delete', [], _('delete markers specified by indices')),
1639 1642 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1640 1643 _('[OBSOLETED [REPLACEMENT ...]]'))
1641 1644 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1642 1645 """create arbitrary obsolete marker
1643 1646
1644 1647 With no arguments, displays the list of obsolescence markers."""
1645 1648
1646 1649 opts = pycompat.byteskwargs(opts)
1647 1650
1648 1651 def parsenodeid(s):
1649 1652 try:
1650 1653 # We do not use revsingle/revrange functions here to accept
1651 1654 # arbitrary node identifiers, possibly not present in the
1652 1655 # local repository.
1653 1656 n = bin(s)
1654 1657 if len(n) != len(nullid):
1655 1658 raise TypeError()
1656 1659 return n
1657 1660 except TypeError:
1658 1661 raise error.Abort('changeset references must be full hexadecimal '
1659 1662 'node identifiers')
1660 1663
1661 1664 if opts.get('delete'):
1662 1665 indices = []
1663 1666 for v in opts.get('delete'):
1664 1667 try:
1665 1668 indices.append(int(v))
1666 1669 except ValueError:
1667 1670 raise error.Abort(_('invalid index value: %r') % v,
1668 1671 hint=_('use integers for indices'))
1669 1672
1670 1673 if repo.currenttransaction():
1671 1674 raise error.Abort(_('cannot delete obsmarkers in the middle '
1672 1675 'of a transaction'))
1673 1676
1674 1677 with repo.lock():
1675 1678 n = repair.deleteobsmarkers(repo.obsstore, indices)
1676 1679 ui.write(_('deleted %i obsolescence markers\n') % n)
1677 1680
1678 1681 return
1679 1682
1680 1683 if precursor is not None:
1681 1684 if opts['rev']:
1682 1685 raise error.Abort('cannot select revision when creating marker')
1683 1686 metadata = {}
1684 1687 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1685 1688 succs = tuple(parsenodeid(succ) for succ in successors)
1686 1689 l = repo.lock()
1687 1690 try:
1688 1691 tr = repo.transaction('debugobsolete')
1689 1692 try:
1690 1693 date = opts.get('date')
1691 1694 if date:
1692 1695 date = dateutil.parsedate(date)
1693 1696 else:
1694 1697 date = None
1695 1698 prec = parsenodeid(precursor)
1696 1699 parents = None
1697 1700 if opts['record_parents']:
1698 1701 if prec not in repo.unfiltered():
1699 1702 raise error.Abort('cannot use --record-parents on '
1700 1703 'unknown changesets')
1701 1704 parents = repo.unfiltered()[prec].parents()
1702 1705 parents = tuple(p.node() for p in parents)
1703 1706 repo.obsstore.create(tr, prec, succs, opts['flags'],
1704 1707 parents=parents, date=date,
1705 1708 metadata=metadata, ui=ui)
1706 1709 tr.close()
1707 1710 except ValueError as exc:
1708 1711 raise error.Abort(_('bad obsmarker input: %s') %
1709 1712 pycompat.bytestr(exc))
1710 1713 finally:
1711 1714 tr.release()
1712 1715 finally:
1713 1716 l.release()
1714 1717 else:
1715 1718 if opts['rev']:
1716 1719 revs = scmutil.revrange(repo, opts['rev'])
1717 1720 nodes = [repo[r].node() for r in revs]
1718 1721 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1719 1722 exclusive=opts['exclusive']))
1720 1723 markers.sort(key=lambda x: x._data)
1721 1724 else:
1722 1725 markers = obsutil.getmarkers(repo)
1723 1726
1724 1727 markerstoiter = markers
1725 1728 isrelevant = lambda m: True
1726 1729 if opts.get('rev') and opts.get('index'):
1727 1730 markerstoiter = obsutil.getmarkers(repo)
1728 1731 markerset = set(markers)
1729 1732 isrelevant = lambda m: m in markerset
1730 1733
1731 1734 fm = ui.formatter('debugobsolete', opts)
1732 1735 for i, m in enumerate(markerstoiter):
1733 1736 if not isrelevant(m):
1734 1737 # marker can be irrelevant when we're iterating over a set
1735 1738 # of markers (markerstoiter) which is bigger than the set
1736 1739 # of markers we want to display (markers)
1737 1740 # this can happen if both --index and --rev options are
1738 1741 # provided and thus we need to iterate over all of the markers
1739 1742 # to get the correct indices, but only display the ones that
1740 1743 # are relevant to --rev value
1741 1744 continue
1742 1745 fm.startitem()
1743 1746 ind = i if opts.get('index') else None
1744 1747 cmdutil.showmarker(fm, m, index=ind)
1745 1748 fm.end()
1746 1749
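# Sketch of the validation done by parsenodeid() in debugobsolete above: a
# changeset reference must be the full 40-character hexadecimal form of a
# 20-byte node. Hypothetical helper using only the standard library.
def _examplevalidnode(s):
    import binascii
    try:
        return len(binascii.unhexlify(s)) == 20
    except (TypeError, ValueError):
        return False
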
1747 1750 @command('debugpathcomplete',
1748 1751 [('f', 'full', None, _('complete an entire path')),
1749 1752 ('n', 'normal', None, _('show only normal files')),
1750 1753 ('a', 'added', None, _('show only added files')),
1751 1754 ('r', 'removed', None, _('show only removed files'))],
1752 1755 _('FILESPEC...'))
1753 1756 def debugpathcomplete(ui, repo, *specs, **opts):
1754 1757 '''complete part or all of a tracked path
1755 1758
1756 1759 This command supports shells that offer path name completion. It
1757 1760 currently completes only files already known to the dirstate.
1758 1761
1759 1762 Completion extends only to the next path segment unless
1760 1763 --full is specified, in which case entire paths are used.'''
1761 1764
1762 1765 def complete(path, acceptable):
1763 1766 dirstate = repo.dirstate
1764 1767 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1765 1768 rootdir = repo.root + pycompat.ossep
1766 1769 if spec != repo.root and not spec.startswith(rootdir):
1767 1770 return [], []
1768 1771 if os.path.isdir(spec):
1769 1772 spec += '/'
1770 1773 spec = spec[len(rootdir):]
1771 1774 fixpaths = pycompat.ossep != '/'
1772 1775 if fixpaths:
1773 1776 spec = spec.replace(pycompat.ossep, '/')
1774 1777 speclen = len(spec)
1775 1778 fullpaths = opts[r'full']
1776 1779 files, dirs = set(), set()
1777 1780 adddir, addfile = dirs.add, files.add
1778 1781 for f, st in dirstate.iteritems():
1779 1782 if f.startswith(spec) and st[0] in acceptable:
1780 1783 if fixpaths:
1781 1784 f = f.replace('/', pycompat.ossep)
1782 1785 if fullpaths:
1783 1786 addfile(f)
1784 1787 continue
1785 1788 s = f.find(pycompat.ossep, speclen)
1786 1789 if s >= 0:
1787 1790 adddir(f[:s])
1788 1791 else:
1789 1792 addfile(f)
1790 1793 return files, dirs
1791 1794
1792 1795 acceptable = ''
1793 1796 if opts[r'normal']:
1794 1797 acceptable += 'nm'
1795 1798 if opts[r'added']:
1796 1799 acceptable += 'a'
1797 1800 if opts[r'removed']:
1798 1801 acceptable += 'r'
1799 1802 cwd = repo.getcwd()
1800 1803 if not specs:
1801 1804 specs = ['.']
1802 1805
1803 1806 files, dirs = set(), set()
1804 1807 for spec in specs:
1805 1808 f, d = complete(spec, acceptable or 'nmar')
1806 1809 files.update(f)
1807 1810 dirs.update(d)
1808 1811 files.update(dirs)
1809 1812 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1810 1813 ui.write('\n')
1811 1814
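# Sketch of the "next path segment" behaviour of debugpathcomplete above:
# unless --full is given, a matching file is truncated at the first separator
# after the typed prefix, so only one more segment is completed at a time.
# Hypothetical helper for illustration.
def _examplenextsegment(path, prefix, sep='/'):
    s = path.find(sep, len(prefix))
    return path if s < 0 else path[:s]
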
1812 1815 @command('debugpeer', [], _('PATH'), norepo=True)
1813 1816 def debugpeer(ui, path):
1814 1817 """establish a connection to a peer repository"""
1815 1818 # Always enable peer request logging. Requires --debug to display
1816 1819 # though.
1817 1820 overrides = {
1818 1821 ('devel', 'debug.peer-request'): True,
1819 1822 }
1820 1823
1821 1824 with ui.configoverride(overrides):
1822 1825 peer = hg.peer(ui, {}, path)
1823 1826
1824 1827 local = peer.local() is not None
1825 1828 canpush = peer.canpush()
1826 1829
1827 1830 ui.write(_('url: %s\n') % peer.url())
1828 1831 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1829 1832 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1830 1833
1831 1834 @command('debugpickmergetool',
1832 1835 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1833 1836 ('', 'changedelete', None, _('emulate merging change and delete')),
1834 1837 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1835 1838 _('[PATTERN]...'),
1836 1839 inferrepo=True)
1837 1840 def debugpickmergetool(ui, repo, *pats, **opts):
1838 1841 """examine which merge tool is chosen for specified file
1839 1842
1840 1843 As described in :hg:`help merge-tools`, Mercurial examines
1841 1844 configurations below in this order to decide which merge tool is
1842 1845 chosen for the specified file.
1843 1846
1844 1847 1. ``--tool`` option
1845 1848 2. ``HGMERGE`` environment variable
1846 1849 3. configurations in ``merge-patterns`` section
1847 1850 4. configuration of ``ui.merge``
1848 1851 5. configurations in ``merge-tools`` section
1849 1852 6. ``hgmerge`` tool (for historical reasons only)
1850 1853 7. default tool for fallback (``:merge`` or ``:prompt``)
1851 1854
1852 1855 This command writes out the examination result in the style below::
1853 1856
1854 1857 FILE = MERGETOOL
1855 1858
1856 1859 By default, all files known in the first parent context of the
1857 1860 working directory are examined. Use file patterns and/or -I/-X
1858 1861 options to limit target files. -r/--rev is also useful to examine
1859 1862 files in another context without actually updating to it.
1860 1863
1861 1864 With --debug, this command shows warning messages while matching
1862 1865 against ``merge-patterns`` and so on, too. It is recommended to
1863 1866 use this option with explicit file patterns and/or -I/-X options,
1864 1867 because this option increases the amount of output per file according
1865 1868 to configurations in hgrc.
1866 1869
1867 1870 With -v/--verbose, this command shows the configurations below
1868 1871 first (only if specified).
1869 1872
1870 1873 - ``--tool`` option
1871 1874 - ``HGMERGE`` environment variable
1872 1875 - configuration of ``ui.merge``
1873 1876
1874 1877 If a merge tool is chosen before matching against
1875 1878 ``merge-patterns``, this command can't show any helpful
1876 1879 information, even with --debug. In such a case, the information
1877 1880 above is useful for understanding why a merge tool was chosen.
1878 1881 """
1879 1882 opts = pycompat.byteskwargs(opts)
1880 1883 overrides = {}
1881 1884 if opts['tool']:
1882 1885 overrides[('ui', 'forcemerge')] = opts['tool']
1883 1886 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1884 1887
1885 1888 with ui.configoverride(overrides, 'debugmergepatterns'):
1886 1889 hgmerge = encoding.environ.get("HGMERGE")
1887 1890 if hgmerge is not None:
1888 1891 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1889 1892 uimerge = ui.config("ui", "merge")
1890 1893 if uimerge:
1891 1894 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1892 1895
1893 1896 ctx = scmutil.revsingle(repo, opts.get('rev'))
1894 1897 m = scmutil.match(ctx, pats, opts)
1895 1898 changedelete = opts['changedelete']
1896 1899 for path in ctx.walk(m):
1897 1900 fctx = ctx[path]
1898 1901 try:
1899 1902 if not ui.debugflag:
1900 1903 ui.pushbuffer(error=True)
1901 1904 tool, toolpath = filemerge._picktool(repo, ui, path,
1902 1905 fctx.isbinary(),
1903 1906 'l' in fctx.flags(),
1904 1907 changedelete)
1905 1908 finally:
1906 1909 if not ui.debugflag:
1907 1910 ui.popbuffer()
1908 1911 ui.write(('%s = %s\n') % (path, tool))
1909 1912
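# Heavily simplified sketch of the precedence described in the docstring of
# debugpickmergetool above: the first non-empty configuration wins. The real
# logic lives in filemerge._picktool(); this hypothetical helper only mirrors
# the ordering of the numbered list.
def _examplepicktool(tool_opt, hgmerge_env, pattern_match, ui_merge):
    for candidate in (tool_opt, hgmerge_env, pattern_match, ui_merge):
        if candidate:
            return candidate
    return ':merge'  # fallback, as in item 7 of the list
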
1910 1913 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1911 1914 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1912 1915 '''access the pushkey key/value protocol
1913 1916
1914 1917 With two args, list the keys in the given namespace.
1915 1918
1916 1919 With five args, set a key to new if it currently is set to old.
1917 1920 Reports success or failure.
1918 1921 '''
1919 1922
1920 1923 target = hg.peer(ui, {}, repopath)
1921 1924 if keyinfo:
1922 1925 key, old, new = keyinfo
1923 1926 with target.commandexecutor() as e:
1924 1927 r = e.callcommand('pushkey', {
1925 1928 'namespace': namespace,
1926 1929 'key': key,
1927 1930 'old': old,
1928 1931 'new': new,
1929 1932 }).result()
1930 1933
1931 1934 ui.status(pycompat.bytestr(r) + '\n')
1932 1935 return not r
1933 1936 else:
1934 1937 for k, v in sorted(target.listkeys(namespace).iteritems()):
1935 1938 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1936 1939 stringutil.escapestr(v)))
1937 1940
1938 1941 @command('debugpvec', [], _('A B'))
1939 1942 def debugpvec(ui, repo, a, b=None):
1940 1943 ca = scmutil.revsingle(repo, a)
1941 1944 cb = scmutil.revsingle(repo, b)
1942 1945 pa = pvec.ctxpvec(ca)
1943 1946 pb = pvec.ctxpvec(cb)
1944 1947 if pa == pb:
1945 1948 rel = "="
1946 1949 elif pa > pb:
1947 1950 rel = ">"
1948 1951 elif pa < pb:
1949 1952 rel = "<"
1950 1953 elif pa | pb:
1951 1954 rel = "|"
1952 1955 ui.write(_("a: %s\n") % pa)
1953 1956 ui.write(_("b: %s\n") % pb)
1954 1957 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1955 1958 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1956 1959 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1957 1960 pa.distance(pb), rel))
1958 1961
1959 1962 @command('debugrebuilddirstate|debugrebuildstate',
1960 1963 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1961 1964 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1962 1965 'the working copy parent')),
1963 1966 ],
1964 1967 _('[-r REV]'))
1965 1968 def debugrebuilddirstate(ui, repo, rev, **opts):
1966 1969 """rebuild the dirstate as it would look like for the given revision
1967 1970
1968 1971 If no revision is specified, the first current parent will be used.
1969 1972
1970 1973 The dirstate will be set to the files of the given revision.
1971 1974 The actual working directory content or existing dirstate
1972 1975 information such as adds or removes is not considered.
1973 1976
1974 1977 ``minimal`` will only rebuild the dirstate status for files that claim to be
1975 1978 tracked but are not in the parent manifest, or that exist in the parent
1976 1979 manifest but are not in the dirstate. It will not change adds, removes, or
1977 1980 modified files that are in the working copy parent.
1978 1981
1979 1982 One use of this command is to make the next :hg:`status` invocation
1980 1983 check the actual file content.
1981 1984 """
1982 1985 ctx = scmutil.revsingle(repo, rev)
1983 1986 with repo.wlock():
1984 1987 dirstate = repo.dirstate
1985 1988 changedfiles = None
1986 1989 # See command doc for what minimal does.
1987 1990 if opts.get(r'minimal'):
1988 1991 manifestfiles = set(ctx.manifest().keys())
1989 1992 dirstatefiles = set(dirstate)
1990 1993 manifestonly = manifestfiles - dirstatefiles
1991 1994 dsonly = dirstatefiles - manifestfiles
1992 1995 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1993 1996 changedfiles = manifestonly | dsnotadded
1994 1997
1995 1998 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1996 1999
1997 2000 @command('debugrebuildfncache', [], '')
1998 2001 def debugrebuildfncache(ui, repo):
1999 2002 """rebuild the fncache file"""
2000 2003 repair.rebuildfncache(ui, repo)
2001 2004
2002 2005 @command('debugrename',
2003 2006 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2004 2007 _('[-r REV] FILE'))
2005 2008 def debugrename(ui, repo, file1, *pats, **opts):
2006 2009 """dump rename information"""
2007 2010
2008 2011 opts = pycompat.byteskwargs(opts)
2009 2012 ctx = scmutil.revsingle(repo, opts.get('rev'))
2010 2013 m = scmutil.match(ctx, (file1,) + pats, opts)
2011 2014 for abs in ctx.walk(m):
2012 2015 fctx = ctx[abs]
2013 2016 o = fctx.filelog().renamed(fctx.filenode())
2014 2017 rel = m.rel(abs)
2015 2018 if o:
2016 2019 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2017 2020 else:
2018 2021 ui.write(_("%s not renamed\n") % rel)
2019 2022
2020 2023 @command('debugrevlog', cmdutil.debugrevlogopts +
2021 2024 [('d', 'dump', False, _('dump index data'))],
2022 2025 _('-c|-m|FILE'),
2023 2026 optionalrepo=True)
2024 2027 def debugrevlog(ui, repo, file_=None, **opts):
2025 2028 """show data and statistics about a revlog"""
2026 2029 opts = pycompat.byteskwargs(opts)
2027 2030 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2028 2031
2029 2032 if opts.get("dump"):
2030 2033 numrevs = len(r)
2031 2034 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2032 2035 " rawsize totalsize compression heads chainlen\n"))
2033 2036 ts = 0
2034 2037 heads = set()
2035 2038
2036 2039 for rev in pycompat.xrange(numrevs):
2037 2040 dbase = r.deltaparent(rev)
2038 2041 if dbase == -1:
2039 2042 dbase = rev
2040 2043 cbase = r.chainbase(rev)
2041 2044 clen = r.chainlen(rev)
2042 2045 p1, p2 = r.parentrevs(rev)
2043 2046 rs = r.rawsize(rev)
2044 2047 ts = ts + rs
2045 2048 heads -= set(r.parentrevs(rev))
2046 2049 heads.add(rev)
2047 2050 try:
2048 2051 compression = ts / r.end(rev)
2049 2052 except ZeroDivisionError:
2050 2053 compression = 0
2051 2054 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2052 2055 "%11d %5d %8d\n" %
2053 2056 (rev, p1, p2, r.start(rev), r.end(rev),
2054 2057 r.start(dbase), r.start(cbase),
2055 2058 r.start(p1), r.start(p2),
2056 2059 rs, ts, compression, len(heads), clen))
2057 2060 return 0
2058 2061
2059 2062 v = r.version
2060 2063 format = v & 0xFFFF
2061 2064 flags = []
2062 2065 gdelta = False
2063 2066 if v & revlog.FLAG_INLINE_DATA:
2064 2067 flags.append('inline')
2065 2068 if v & revlog.FLAG_GENERALDELTA:
2066 2069 gdelta = True
2067 2070 flags.append('generaldelta')
2068 2071 if not flags:
2069 2072 flags = ['(none)']
2070 2073
2071 2074 ### tracks merge vs single parent
2072 2075 nummerges = 0
2073 2076
2074 2077 ### tracks the ways the "delta" is built
2075 2078 # nodelta
2076 2079 numempty = 0
2077 2080 numemptytext = 0
2078 2081 numemptydelta = 0
2079 2082 # full file content
2080 2083 numfull = 0
2081 2084 # intermediate snapshot against a prior snapshot
2082 2085 numsemi = 0
2083 2086 # snapshot count per depth
2084 2087 numsnapdepth = collections.defaultdict(lambda: 0)
2085 2088 # delta against previous revision
2086 2089 numprev = 0
2087 2090 # delta against first or second parent (not prev)
2088 2091 nump1 = 0
2089 2092 nump2 = 0
2090 2093 # delta against neither prev nor parents
2091 2094 numother = 0
2092 2095 # delta against prev that are also first or second parent
2093 2096 # (details of `numprev`)
2094 2097 nump1prev = 0
2095 2098 nump2prev = 0
2096 2099
2097 2100 # data about delta chain of each revs
2098 2101 chainlengths = []
2099 2102 chainbases = []
2100 2103 chainspans = []
2101 2104
2102 2105 # data about each revision
2103 2106 datasize = [None, 0, 0]
2104 2107 fullsize = [None, 0, 0]
2105 2108 semisize = [None, 0, 0]
2106 2109 # snapshot count per depth
2107 2110 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2108 2111 deltasize = [None, 0, 0]
2109 2112 chunktypecounts = {}
2110 2113 chunktypesizes = {}
2111 2114
2112 2115 def addsize(size, l):
2113 2116 if l[0] is None or size < l[0]:
2114 2117 l[0] = size
2115 2118 if size > l[1]:
2116 2119 l[1] = size
2117 2120 l[2] += size
2118 2121
2119 2122 numrevs = len(r)
2120 2123 for rev in pycompat.xrange(numrevs):
2121 2124 p1, p2 = r.parentrevs(rev)
2122 2125 delta = r.deltaparent(rev)
2123 2126 if format > 0:
2124 2127 addsize(r.rawsize(rev), datasize)
2125 2128 if p2 != nullrev:
2126 2129 nummerges += 1
2127 2130 size = r.length(rev)
2128 2131 if delta == nullrev:
2129 2132 chainlengths.append(0)
2130 2133 chainbases.append(r.start(rev))
2131 2134 chainspans.append(size)
2132 2135 if size == 0:
2133 2136 numempty += 1
2134 2137 numemptytext += 1
2135 2138 else:
2136 2139 numfull += 1
2137 2140 numsnapdepth[0] += 1
2138 2141 addsize(size, fullsize)
2139 2142 addsize(size, snapsizedepth[0])
2140 2143 else:
2141 2144 chainlengths.append(chainlengths[delta] + 1)
2142 2145 baseaddr = chainbases[delta]
2143 2146 revaddr = r.start(rev)
2144 2147 chainbases.append(baseaddr)
2145 2148 chainspans.append((revaddr - baseaddr) + size)
2146 2149 if size == 0:
2147 2150 numempty += 1
2148 2151 numemptydelta += 1
2149 2152 elif r.issnapshot(rev):
2150 2153 addsize(size, semisize)
2151 2154 numsemi += 1
2152 2155 depth = r.snapshotdepth(rev)
2153 2156 numsnapdepth[depth] += 1
2154 2157 addsize(size, snapsizedepth[depth])
2155 2158 else:
2156 2159 addsize(size, deltasize)
2157 2160 if delta == rev - 1:
2158 2161 numprev += 1
2159 2162 if delta == p1:
2160 2163 nump1prev += 1
2161 2164 elif delta == p2:
2162 2165 nump2prev += 1
2163 2166 elif delta == p1:
2164 2167 nump1 += 1
2165 2168 elif delta == p2:
2166 2169 nump2 += 1
2167 2170 elif delta != nullrev:
2168 2171 numother += 1
2169 2172
2170 2173 # Obtain data on the raw chunks in the revlog.
2171 2174 if util.safehasattr(r, '_getsegmentforrevs'):
2172 2175 segment = r._getsegmentforrevs(rev, rev)[1]
2173 2176 else:
2174 2177 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2175 2178 if segment:
2176 2179 chunktype = bytes(segment[0:1])
2177 2180 else:
2178 2181 chunktype = 'empty'
2179 2182
2180 2183 if chunktype not in chunktypecounts:
2181 2184 chunktypecounts[chunktype] = 0
2182 2185 chunktypesizes[chunktype] = 0
2183 2186
2184 2187 chunktypecounts[chunktype] += 1
2185 2188 chunktypesizes[chunktype] += size
2186 2189
2187 2190 # Adjust size min value for empty cases
2188 2191 for size in (datasize, fullsize, semisize, deltasize):
2189 2192 if size[0] is None:
2190 2193 size[0] = 0
2191 2194
2192 2195 numdeltas = numrevs - numfull - numempty - numsemi
2193 2196 numoprev = numprev - nump1prev - nump2prev
2194 2197 totalrawsize = datasize[2]
2195 2198 datasize[2] /= numrevs
2196 2199 fulltotal = fullsize[2]
2197 2200 fullsize[2] /= numfull
2198 2201 semitotal = semisize[2]
2199 2202 snaptotal = {}
2200 2203 if numsemi > 0:
2201 2204 semisize[2] /= numsemi
2202 2205 for depth in snapsizedepth:
2203 2206 snaptotal[depth] = snapsizedepth[depth][2]
2204 2207 snapsizedepth[depth][2] /= numsnapdepth[depth]
2205 2208
2206 2209 deltatotal = deltasize[2]
2207 2210 if numdeltas > 0:
2208 2211 deltasize[2] /= numdeltas
2209 2212 totalsize = fulltotal + semitotal + deltatotal
2210 2213 avgchainlen = sum(chainlengths) / numrevs
2211 2214 maxchainlen = max(chainlengths)
2212 2215 maxchainspan = max(chainspans)
2213 2216 compratio = 1
2214 2217 if totalsize:
2215 2218 compratio = totalrawsize / totalsize
2216 2219
2217 2220 basedfmtstr = '%%%dd\n'
2218 2221 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2219 2222
2220 2223 def dfmtstr(max):
2221 2224 return basedfmtstr % len(str(max))
2222 2225 def pcfmtstr(max, padding=0):
2223 2226 return basepcfmtstr % (len(str(max)), ' ' * padding)
2224 2227
2225 2228 def pcfmt(value, total):
2226 2229 if total:
2227 2230 return (value, 100 * float(value) / total)
2228 2231 else:
2229 2232 return value, 100.0
2230 2233
2231 2234 ui.write(('format : %d\n') % format)
2232 2235 ui.write(('flags : %s\n') % ', '.join(flags))
2233 2236
2234 2237 ui.write('\n')
2235 2238 fmt = pcfmtstr(totalsize)
2236 2239 fmt2 = dfmtstr(totalsize)
2237 2240 ui.write(('revisions : ') + fmt2 % numrevs)
2238 2241 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2239 2242 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2240 2243 ui.write(('revisions : ') + fmt2 % numrevs)
2241 2244 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2242 2245 ui.write((' text : ')
2243 2246 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2244 2247 ui.write((' delta : ')
2245 2248 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2246 2249 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2247 2250 for depth in sorted(numsnapdepth):
2248 2251 ui.write((' lvl-%-3d : ' % depth)
2249 2252 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2250 2253 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2251 2254 ui.write(('revision size : ') + fmt2 % totalsize)
2252 2255 ui.write((' snapshot : ')
2253 2256 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2254 2257 for depth in sorted(numsnapdepth):
2255 2258 ui.write((' lvl-%-3d : ' % depth)
2256 2259 + fmt % pcfmt(snaptotal[depth], totalsize))
2257 2260 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2258 2261
2259 2262 def fmtchunktype(chunktype):
2260 2263 if chunktype == 'empty':
2261 2264 return ' %s : ' % chunktype
2262 2265 elif chunktype in pycompat.bytestr(string.ascii_letters):
2263 2266 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2264 2267 else:
2265 2268 return ' 0x%s : ' % hex(chunktype)
2266 2269
2267 2270 ui.write('\n')
2268 2271 ui.write(('chunks : ') + fmt2 % numrevs)
2269 2272 for chunktype in sorted(chunktypecounts):
2270 2273 ui.write(fmtchunktype(chunktype))
2271 2274 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2272 2275 ui.write(('chunks size : ') + fmt2 % totalsize)
2273 2276 for chunktype in sorted(chunktypecounts):
2274 2277 ui.write(fmtchunktype(chunktype))
2275 2278 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2276 2279
2277 2280 ui.write('\n')
2278 2281 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2279 2282 ui.write(('avg chain length : ') + fmt % avgchainlen)
2280 2283 ui.write(('max chain length : ') + fmt % maxchainlen)
2281 2284 ui.write(('max chain reach : ') + fmt % maxchainspan)
2282 2285 ui.write(('compression ratio : ') + fmt % compratio)
2283 2286
2284 2287 if format > 0:
2285 2288 ui.write('\n')
2286 2289 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2287 2290 % tuple(datasize))
2288 2291 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2289 2292 % tuple(fullsize))
2290 2293 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2291 2294 % tuple(semisize))
2292 2295 for depth in sorted(snapsizedepth):
2293 2296 if depth == 0:
2294 2297 continue
2295 2298 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2296 2299 % ((depth,) + tuple(snapsizedepth[depth])))
2297 2300 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2298 2301 % tuple(deltasize))
2299 2302
2300 2303 if numdeltas > 0:
2301 2304 ui.write('\n')
2302 2305 fmt = pcfmtstr(numdeltas)
2303 2306 fmt2 = pcfmtstr(numdeltas, 4)
2304 2307 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2305 2308 if numprev > 0:
2306 2309 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2307 2310 numprev))
2308 2311 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2309 2312 numprev))
2310 2313 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2311 2314 numprev))
2312 2315 if gdelta:
2313 2316 ui.write(('deltas against p1 : ')
2314 2317 + fmt % pcfmt(nump1, numdeltas))
2315 2318 ui.write(('deltas against p2 : ')
2316 2319 + fmt % pcfmt(nump2, numdeltas))
2317 2320 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2318 2321 numdeltas))
2319 2322
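# Sketch of how the summary figures printed by debugrevlog above are derived:
# the average chain length is the mean of the per-revision chain lengths and
# the compression ratio is total raw size divided by total stored size
# (defaulting to 1 when nothing is stored). Hypothetical helper.
def _examplesummary(chainlengths, totalrawsize, totalsize):
    avgchainlen = sum(chainlengths) / len(chainlengths) if chainlengths else 0
    compratio = totalrawsize / totalsize if totalsize else 1
    return avgchainlen, compratio
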
2320 2323 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2321 2324 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2322 2325 _('[-f FORMAT] -c|-m|FILE'),
2323 2326 optionalrepo=True)
2324 2327 def debugrevlogindex(ui, repo, file_=None, **opts):
2325 2328 """dump the contents of a revlog index"""
2326 2329 opts = pycompat.byteskwargs(opts)
2327 2330 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2328 2331 format = opts.get('format', 0)
2329 2332 if format not in (0, 1):
2330 2333 raise error.Abort(_("unknown format %d") % format)
2331 2334
2332 2335 if ui.debugflag:
2333 2336 shortfn = hex
2334 2337 else:
2335 2338 shortfn = short
2336 2339
2337 2340 # There might not be anything in r, so have a sane default
2338 2341 idlen = 12
2339 2342 for i in r:
2340 2343 idlen = len(shortfn(r.node(i)))
2341 2344 break
2342 2345
2343 2346 if format == 0:
2344 2347 if ui.verbose:
2345 2348 ui.write((" rev offset length linkrev"
2346 2349 " %s %s p2\n") % ("nodeid".ljust(idlen),
2347 2350 "p1".ljust(idlen)))
2348 2351 else:
2349 2352 ui.write((" rev linkrev %s %s p2\n") % (
2350 2353 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2351 2354 elif format == 1:
2352 2355 if ui.verbose:
2353 2356 ui.write((" rev flag offset length size link p1"
2354 2357 " p2 %s\n") % "nodeid".rjust(idlen))
2355 2358 else:
2356 2359 ui.write((" rev flag size link p1 p2 %s\n") %
2357 2360 "nodeid".rjust(idlen))
2358 2361
2359 2362 for i in r:
2360 2363 node = r.node(i)
2361 2364 if format == 0:
2362 2365 try:
2363 2366 pp = r.parents(node)
2364 2367 except Exception:
2365 2368 pp = [nullid, nullid]
2366 2369 if ui.verbose:
2367 2370 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2368 2371 i, r.start(i), r.length(i), r.linkrev(i),
2369 2372 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2370 2373 else:
2371 2374 ui.write("% 6d % 7d %s %s %s\n" % (
2372 2375 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2373 2376 shortfn(pp[1])))
2374 2377 elif format == 1:
2375 2378 pr = r.parentrevs(i)
2376 2379 if ui.verbose:
2377 2380 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2378 2381 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2379 2382 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2380 2383 else:
2381 2384 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2382 2385 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2383 2386 shortfn(node)))
2384 2387
2385 2388 @command('debugrevspec',
2386 2389 [('', 'optimize', None,
2387 2390 _('print parsed tree after optimizing (DEPRECATED)')),
2388 2391 ('', 'show-revs', True, _('print list of result revisions (default)')),
2389 2392 ('s', 'show-set', None, _('print internal representation of result set')),
2390 2393 ('p', 'show-stage', [],
2391 2394 _('print parsed tree at the given stage'), _('NAME')),
2392 2395 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2393 2396 ('', 'verify-optimized', False, _('verify optimized result')),
2394 2397 ],
2395 2398 ('REVSPEC'))
2396 2399 def debugrevspec(ui, repo, expr, **opts):
2397 2400 """parse and apply a revision specification
2398 2401
2399 2402 Use -p/--show-stage option to print the parsed tree at the given stages.
2400 2403 Use -p all to print tree at every stage.
2401 2404
2402 2405 Use --no-show-revs option with -s or -p to print only the set
2403 2406 representation or the parsed tree respectively.
2404 2407
2405 2408 Use --verify-optimized to compare the optimized result with the unoptimized
2406 2409 one. Returns 1 if the optimized result differs.
2407 2410 """
2408 2411 opts = pycompat.byteskwargs(opts)
2409 2412 aliases = ui.configitems('revsetalias')
2410 2413 stages = [
2411 2414 ('parsed', lambda tree: tree),
2412 2415 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2413 2416 ui.warn)),
2414 2417 ('concatenated', revsetlang.foldconcat),
2415 2418 ('analyzed', revsetlang.analyze),
2416 2419 ('optimized', revsetlang.optimize),
2417 2420 ]
2418 2421 if opts['no_optimized']:
2419 2422 stages = stages[:-1]
2420 2423 if opts['verify_optimized'] and opts['no_optimized']:
2421 2424 raise error.Abort(_('cannot use --verify-optimized with '
2422 2425 '--no-optimized'))
2423 2426 stagenames = set(n for n, f in stages)
2424 2427
2425 2428 showalways = set()
2426 2429 showchanged = set()
2427 2430 if ui.verbose and not opts['show_stage']:
2428 2431 # show parsed tree by --verbose (deprecated)
2429 2432 showalways.add('parsed')
2430 2433 showchanged.update(['expanded', 'concatenated'])
2431 2434 if opts['optimize']:
2432 2435 showalways.add('optimized')
2433 2436 if opts['show_stage'] and opts['optimize']:
2434 2437 raise error.Abort(_('cannot use --optimize with --show-stage'))
2435 2438 if opts['show_stage'] == ['all']:
2436 2439 showalways.update(stagenames)
2437 2440 else:
2438 2441 for n in opts['show_stage']:
2439 2442 if n not in stagenames:
2440 2443 raise error.Abort(_('invalid stage name: %s') % n)
2441 2444 showalways.update(opts['show_stage'])
2442 2445
2443 2446 treebystage = {}
2444 2447 printedtree = None
2445 2448 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2446 2449 for n, f in stages:
2447 2450 treebystage[n] = tree = f(tree)
2448 2451 if n in showalways or (n in showchanged and tree != printedtree):
2449 2452 if opts['show_stage'] or n != 'parsed':
2450 2453 ui.write(("* %s:\n") % n)
2451 2454 ui.write(revsetlang.prettyformat(tree), "\n")
2452 2455 printedtree = tree
2453 2456
2454 2457 if opts['verify_optimized']:
2455 2458 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2456 2459 brevs = revset.makematcher(treebystage['optimized'])(repo)
2457 2460 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2458 2461 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2459 2462 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2460 2463 arevs = list(arevs)
2461 2464 brevs = list(brevs)
2462 2465 if arevs == brevs:
2463 2466 return 0
2464 2467 ui.write(('--- analyzed\n'), label='diff.file_a')
2465 2468 ui.write(('+++ optimized\n'), label='diff.file_b')
2466 2469 sm = difflib.SequenceMatcher(None, arevs, brevs)
2467 2470 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2468 2471 if tag in ('delete', 'replace'):
2469 2472 for c in arevs[alo:ahi]:
2470 2473 ui.write('-%s\n' % c, label='diff.deleted')
2471 2474 if tag in ('insert', 'replace'):
2472 2475 for c in brevs[blo:bhi]:
2473 2476 ui.write('+%s\n' % c, label='diff.inserted')
2474 2477 if tag == 'equal':
2475 2478 for c in arevs[alo:ahi]:
2476 2479 ui.write(' %s\n' % c)
2477 2480 return 1
2478 2481
2479 2482 func = revset.makematcher(tree)
2480 2483 revs = func(repo)
2481 2484 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2482 2485 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2483 2486 if not opts['show_revs']:
2484 2487 return
2485 2488 for c in revs:
2486 2489 ui.write("%d\n" % c)
2487 2490
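# Minimal sketch of the stage pipeline in debugrevspec above: each stage is a
# (name, transform) pair applied to the output of the previous stage, and the
# final tree is what becomes a matcher. Hypothetical helper for illustration.
def _examplerunstages(tree, stages):
    bystage = {}
    for name, transform in stages:
        tree = transform(tree)
        bystage[name] = tree
    return bystage, tree
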
2488 2491 @command('debugserve', [
2489 2492 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2490 2493 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2491 2494 ('', 'logiofile', '', _('file to log server I/O to')),
2492 2495 ], '')
2493 2496 def debugserve(ui, repo, **opts):
2494 2497 """run a server with advanced settings
2495 2498
2496 2499 This command is similar to :hg:`serve`. It exists partially as a
2497 2500 workaround for the fact that ``hg serve --stdio`` must have specific
2498 2501 arguments for security reasons.
2499 2502 """
2500 2503 opts = pycompat.byteskwargs(opts)
2501 2504
2502 2505 if not opts['sshstdio']:
2503 2506 raise error.Abort(_('only --sshstdio is currently supported'))
2504 2507
2505 2508 logfh = None
2506 2509
2507 2510 if opts['logiofd'] and opts['logiofile']:
2508 2511 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2509 2512
2510 2513 if opts['logiofd']:
2511 2514 # Line buffered because output is line based.
2512 2515 try:
2513 2516 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2514 2517 except OSError as e:
2515 2518 if e.errno != errno.ESPIPE:
2516 2519 raise
2517 2520 # can't seek a pipe, so `ab` mode fails on py3
2518 2521 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2519 2522 elif opts['logiofile']:
2520 2523 logfh = open(opts['logiofile'], 'ab', 1)
2521 2524
2522 2525 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2523 2526 s.serve_forever()
2524 2527
2525 2528 @command('debugsetparents', [], _('REV1 [REV2]'))
2526 2529 def debugsetparents(ui, repo, rev1, rev2=None):
2527 2530 """manually set the parents of the current working directory
2528 2531
2529 2532 This is useful for writing repository conversion tools, but should
2530 2533 be used with care. For example, neither the working directory nor the
2531 2534 dirstate is updated, so file status may be incorrect after running this
2532 2535 command.
2533 2536
2534 2537 Returns 0 on success.
2535 2538 """
2536 2539
2537 2540 node1 = scmutil.revsingle(repo, rev1).node()
2538 2541 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2539 2542
2540 2543 with repo.wlock():
2541 2544 repo.setparents(node1, node2)
2542 2545
2543 2546 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2544 2547 def debugssl(ui, repo, source=None, **opts):
2545 2548 '''test a secure connection to a server
2546 2549
2547 2550 This builds the certificate chain for the server on Windows, installing the
2548 2551 missing intermediates and trusted root via Windows Update if necessary. It
2549 2552 does nothing on other platforms.
2550 2553
2551 2554 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2552 2555 that server is used. See :hg:`help urls` for more information.
2553 2556
2554 2557 If the update succeeds, retry the original operation. Otherwise, the cause
2555 2558 of the SSL error is likely another issue.
2556 2559 '''
2557 2560 if not pycompat.iswindows:
2558 2561 raise error.Abort(_('certificate chain building is only possible on '
2559 2562 'Windows'))
2560 2563
2561 2564 if not source:
2562 2565 if not repo:
2563 2566 raise error.Abort(_("there is no Mercurial repository here, and no "
2564 2567 "server specified"))
2565 2568 source = "default"
2566 2569
2567 2570 source, branches = hg.parseurl(ui.expandpath(source))
2568 2571 url = util.url(source)
2569 2572 addr = None
2570 2573
2571 2574 defaultport = {'https': 443, 'ssh': 22}
2572 2575 if url.scheme in defaultport:
2573 2576 try:
2574 2577 addr = (url.host, int(url.port or defaultport[url.scheme]))
2575 2578 except ValueError:
2576 2579 raise error.Abort(_("malformed port number in URL"))
2577 2580 else:
2578 2581 raise error.Abort(_("only https and ssh connections are supported"))
2579 2582
2580 2583 from . import win32
2581 2584
2582 2585 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2583 2586 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2584 2587
2585 2588 try:
2586 2589 s.connect(addr)
2587 2590 cert = s.getpeercert(True)
2588 2591
2589 2592 ui.status(_('checking the certificate chain for %s\n') % url.host)
2590 2593
2591 2594 complete = win32.checkcertificatechain(cert, build=False)
2592 2595
2593 2596 if not complete:
2594 2597 ui.status(_('certificate chain is incomplete, updating... '))
2595 2598
2596 2599 if not win32.checkcertificatechain(cert):
2597 2600 ui.status(_('failed.\n'))
2598 2601 else:
2599 2602 ui.status(_('done.\n'))
2600 2603 else:
2601 2604 ui.status(_('full certificate chain is available\n'))
2602 2605 finally:
2603 2606 s.close()
2604 2607
2605 2608 @command('debugsub',
2606 2609 [('r', 'rev', '',
2607 2610 _('revision to check'), _('REV'))],
2608 2611 _('[-r REV] [REV]'))
2609 2612 def debugsub(ui, repo, rev=None):
2610 2613 ctx = scmutil.revsingle(repo, rev, None)
2611 2614 for k, v in sorted(ctx.substate.items()):
2612 2615 ui.write(('path %s\n') % k)
2613 2616 ui.write((' source %s\n') % v[0])
2614 2617 ui.write((' revision %s\n') % v[1])
2615 2618
2616 2619 @command('debugsuccessorssets',
2617 2620 [('', 'closest', False, _('return closest successors sets only'))],
2618 2621 _('[REV]'))
2619 2622 def debugsuccessorssets(ui, repo, *revs, **opts):
2620 2623 """show set of successors for revision
2621 2624
2622 2625 A successors set of changeset A is a consistent group of revisions that
2623 2626 succeed A. It contains non-obsolete changesets only unless the
2624 2627 --closest option is used.
2625 2628
2626 2629 In most cases a changeset A has a single successors set containing a single
2627 2630 successor (changeset A replaced by A').
2628 2631
2629 2632 A changeset that is made obsolete with no successors is called "pruned".
2630 2633 Such changesets have no successors sets at all.
2631 2634
2632 2635 A changeset that has been "split" will have a successors set containing
2633 2636 more than one successor.
2634 2637
2635 2638 A changeset that has been rewritten in multiple different ways is called
2636 2639 "divergent". Such changesets have multiple successor sets (each of which
2637 2640 may also be split, i.e. have multiple successors).
2638 2641
2639 2642 Results are displayed as follows::
2640 2643
2641 2644 <rev1>
2642 2645 <successors-1A>
2643 2646 <rev2>
2644 2647 <successors-2A>
2645 2648 <successors-2B1> <successors-2B2> <successors-2B3>
2646 2649
2647 2650 Here rev2 has two possible (i.e. divergent) successors sets. The first
2648 2651 holds one element, whereas the second holds three (i.e. the changeset has
2649 2652 been split).
2650 2653 """
2651 2654 # passed to successorssets caching computation from one call to another
2652 2655 cache = {}
2653 2656 ctx2str = bytes
2654 2657 node2str = short
2655 2658 for rev in scmutil.revrange(repo, revs):
2656 2659 ctx = repo[rev]
2657 2660 ui.write('%s\n'% ctx2str(ctx))
2658 2661 for succsset in obsutil.successorssets(repo, ctx.node(),
2659 2662 closest=opts[r'closest'],
2660 2663 cache=cache):
2661 2664 if succsset:
2662 2665 ui.write(' ')
2663 2666 ui.write(node2str(succsset[0]))
2664 2667 for node in succsset[1:]:
2665 2668 ui.write(' ')
2666 2669 ui.write(node2str(node))
2667 2670 ui.write('\n')
2668 2671
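# Sketch of the display format described in the docstring of
# debugsuccessorssets above: one line per requested revision, then one
# indented line per successors set with its members separated by spaces.
# Hypothetical helper for illustration.
def _exampleformatsuccessors(rev, succssets):
    lines = ['%s' % rev]
    for succsset in succssets:
        lines.append(' ' + ' '.join(succsset))
    return '\n'.join(lines)
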
2669 2672 @command('debugtemplate',
2670 2673 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2671 2674 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2672 2675 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2673 2676 optionalrepo=True)
2674 2677 def debugtemplate(ui, repo, tmpl, **opts):
2675 2678 """parse and apply a template
2676 2679
2677 2680 If -r/--rev is given, the template is processed as a log template and
2678 2681 applied to the given changesets. Otherwise, it is processed as a generic
2679 2682 template.
2680 2683
2681 2684 Use --verbose to print the parsed tree.
2682 2685 """
2683 2686 revs = None
2684 2687 if opts[r'rev']:
2685 2688 if repo is None:
2686 2689 raise error.RepoError(_('there is no Mercurial repository here '
2687 2690 '(.hg not found)'))
2688 2691 revs = scmutil.revrange(repo, opts[r'rev'])
2689 2692
2690 2693 props = {}
2691 2694 for d in opts[r'define']:
2692 2695 try:
2693 2696 k, v = (e.strip() for e in d.split('=', 1))
2694 2697 if not k or k == 'ui':
2695 2698 raise ValueError
2696 2699 props[k] = v
2697 2700 except ValueError:
2698 2701 raise error.Abort(_('malformed keyword definition: %s') % d)
2699 2702
2700 2703 if ui.verbose:
2701 2704 aliases = ui.configitems('templatealias')
2702 2705 tree = templater.parse(tmpl)
2703 2706 ui.note(templater.prettyformat(tree), '\n')
2704 2707 newtree = templater.expandaliases(tree, aliases)
2705 2708 if newtree != tree:
2706 2709 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2707 2710
2708 2711 if revs is None:
2709 2712 tres = formatter.templateresources(ui, repo)
2710 2713 t = formatter.maketemplater(ui, tmpl, resources=tres)
2711 2714 if ui.verbose:
2712 2715 kwds, funcs = t.symbolsuseddefault()
2713 2716 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2714 2717 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2715 2718 ui.write(t.renderdefault(props))
2716 2719 else:
2717 2720 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2718 2721 if ui.verbose:
2719 2722 kwds, funcs = displayer.t.symbolsuseddefault()
2720 2723 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2721 2724 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2722 2725 for r in revs:
2723 2726 displayer.show(repo[r], **pycompat.strkwargs(props))
2724 2727 displayer.close()
2725 2728
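# Sketch of the -D/--define parsing in debugtemplate above: each definition
# is split once on '=', both halves are stripped, and an empty key or the
# reserved name 'ui' is rejected. Hypothetical helper; the command itself
# converts the resulting ValueError into an Abort.
def _exampleparsedefine(definition):
    key, value = (e.strip() for e in definition.split('=', 1))
    if not key or key == 'ui':
        raise ValueError('malformed keyword definition: %s' % definition)
    return key, value
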
2726 2729 @command('debuguigetpass', [
2727 2730 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2728 2731 ], _('[-p TEXT]'), norepo=True)
2729 2732 def debuguigetpass(ui, prompt=''):
2730 2733 """show prompt to type password"""
2731 2734 r = ui.getpass(prompt)
2732 2735 ui.write(('response: %s\n') % r)
2733 2736
2734 2737 @command('debuguiprompt', [
2735 2738 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2736 2739 ], _('[-p TEXT]'), norepo=True)
2737 2740 def debuguiprompt(ui, prompt=''):
2738 2741 """show plain prompt"""
2739 2742 r = ui.prompt(prompt)
2740 2743 ui.write(('response: %s\n') % r)
2741 2744
2742 2745 @command('debugupdatecaches', [])
2743 2746 def debugupdatecaches(ui, repo, *pats, **opts):
2744 2747 """warm all known caches in the repository"""
2745 2748 with repo.wlock(), repo.lock():
2746 2749 repo.updatecaches(full=True)
2747 2750
2748 2751 @command('debugupgraderepo', [
2749 2752 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2750 2753 ('', 'run', False, _('performs an upgrade')),
2751 2754 ])
2752 2755 def debugupgraderepo(ui, repo, run=False, optimize=None):
2753 2756 """upgrade a repository to use different features
2754 2757
2755 2758 If no arguments are specified, the repository is evaluated for upgrade
2756 2759 and a list of problems and potential optimizations is printed.
2757 2760
2758 2761 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2759 2762 can be influenced via additional arguments. More details will be provided
2760 2763 by the command output when run without ``--run``.
2761 2764
2762 2765 During the upgrade, the repository will be locked and no writes will be
2763 2766 allowed.
2764 2767
2765 2768 At the end of the upgrade, the repository may not be readable while new
2766 2769 repository data is swapped in. This window will be as long as it takes to
2767 2770 rename some directories inside the ``.hg`` directory. On most machines, this
2768 2771 should complete almost instantaneously and the chances of a consumer being
2769 2772 unable to access the repository should be low.
2770 2773 """
2771 2774 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2772 2775
2773 2776 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2774 2777 inferrepo=True)
2775 2778 def debugwalk(ui, repo, *pats, **opts):
2776 2779 """show how files match on given patterns"""
2777 2780 opts = pycompat.byteskwargs(opts)
2778 2781 m = scmutil.match(repo[None], pats, opts)
2779 2782 if ui.verbose:
2780 2783 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2781 2784 items = list(repo[None].walk(m))
2782 2785 if not items:
2783 2786 return
2784 2787 f = lambda fn: fn
2785 2788 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2786 2789 f = lambda fn: util.normpath(fn)
2787 2790 fmt = 'f %%-%ds %%-%ds %%s' % (
2788 2791 max([len(abs) for abs in items]),
2789 2792 max([len(m.rel(abs)) for abs in items]))
2790 2793 for abs in items:
2791 2794 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2792 2795 ui.write("%s\n" % line.rstrip())
2793 2796
2794 2797 @command('debugwhyunstable', [], _('REV'))
2795 2798 def debugwhyunstable(ui, repo, rev):
2796 2799 """explain instabilities of a changeset"""
2797 2800 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2798 2801 dnodes = ''
2799 2802 if entry.get('divergentnodes'):
2800 2803 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2801 2804 for ctx in entry['divergentnodes']) + ' '
2802 2805 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2803 2806 entry['reason'], entry['node']))
2804 2807
2805 2808 @command('debugwireargs',
2806 2809 [('', 'three', '', 'three'),
2807 2810 ('', 'four', '', 'four'),
2808 2811 ('', 'five', '', 'five'),
2809 2812 ] + cmdutil.remoteopts,
2810 2813 _('REPO [OPTIONS]... [ONE [TWO]]'),
2811 2814 norepo=True)
2812 2815 def debugwireargs(ui, repopath, *vals, **opts):
2813 2816 opts = pycompat.byteskwargs(opts)
2814 2817 repo = hg.peer(ui, opts, repopath)
2815 2818 for opt in cmdutil.remoteopts:
2816 2819 del opts[opt[1]]
2817 2820 args = {}
2818 2821 for k, v in opts.iteritems():
2819 2822 if v:
2820 2823 args[k] = v
2821 2824 args = pycompat.strkwargs(args)
2822 2825 # run twice to check that we don't mess up the stream for the next command
2823 2826 res1 = repo.debugwireargs(*vals, **args)
2824 2827 res2 = repo.debugwireargs(*vals, **args)
2825 2828 ui.write("%s\n" % res1)
2826 2829 if res1 != res2:
2827 2830 ui.warn("%s\n" % res2)
2828 2831
2829 2832 def _parsewirelangblocks(fh):
2830 2833 activeaction = None
2831 2834 blocklines = []
2832 2835 lastindent = 0
2833 2836
2834 2837 for line in fh:
2835 2838 line = line.rstrip()
2836 2839 if not line:
2837 2840 continue
2838 2841
2839 2842 if line.startswith(b'#'):
2840 2843 continue
2841 2844
2842 2845 if not line.startswith(b' '):
2843 2846 # New block. Flush previous one.
2844 2847 if activeaction:
2845 2848 yield activeaction, blocklines
2846 2849
2847 2850 activeaction = line
2848 2851 blocklines = []
2849 2852 lastindent = 0
2850 2853 continue
2851 2854
2852 2855 # Else we start with an indent.
2853 2856
2854 2857 if not activeaction:
2855 2858 raise error.Abort(_('indented line outside of block'))
2856 2859
2857 2860 indent = len(line) - len(line.lstrip())
2858 2861
2859 2862 # If this line is indented more than the last line, concatenate it.
2860 2863 if indent > lastindent and blocklines:
2861 2864 blocklines[-1] += line.lstrip()
2862 2865 else:
2863 2866 blocklines.append(line)
2864 2867 lastindent = indent
2865 2868
2866 2869 # Flush last block.
2867 2870 if activeaction:
2868 2871 yield activeaction, blocklines
2869 2872
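# Hedged usage example for _parsewirelangblocks() above: an unindented line
# starts a new action and indented lines become that action's block. The
# sample script here is made up for illustration.
def _exampleparseblocks():
    import io
    script = io.BytesIO(b'command listkeys\n    namespace bookmarks\n')
    # Returns [(b'command listkeys', [b'    namespace bookmarks'])]
    return list(_parsewirelangblocks(script))
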
2870 2873 @command('debugwireproto',
2871 2874 [
2872 2875 ('', 'localssh', False, _('start an SSH server for this repo')),
2873 2876 ('', 'peer', '', _('construct a specific version of the peer')),
2874 2877 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2875 2878 ('', 'nologhandshake', False,
2876 2879 _('do not log I/O related to the peer handshake')),
2877 2880 ] + cmdutil.remoteopts,
2878 2881 _('[PATH]'),
2879 2882 optionalrepo=True)
2880 2883 def debugwireproto(ui, repo, path=None, **opts):
2881 2884 """send wire protocol commands to a server
2882 2885
2883 2886 This command can be used to issue wire protocol commands to remote
2884 2887 peers and to debug the raw data being exchanged.
2885 2888
2886 2889 ``--localssh`` will start an SSH server against the current repository
2887 2890 and connect to that. By default, the connection will perform a handshake
2888 2891 and establish an appropriate peer instance.
2889 2892
2890 2893 ``--peer`` can be used to bypass the handshake protocol and construct a
2891 2894 peer instance using the specified class type. Valid values are ``raw``,
2892 2895 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2893 2896 raw data payloads and don't support higher-level command actions.
2894 2897
2895 2898 ``--noreadstderr`` can be used to disable automatic reading from stderr
2896 2899 of the peer (for SSH connections only). Disabling automatic reading of
2897 2900 stderr is useful for making output more deterministic.
2898 2901
2899 2902 Commands are issued via a mini language which is specified via stdin.
2900 2903 The language consists of individual actions to perform. An action is
2901 2904 defined by a block. A block is defined as a line with no leading
2902 2905 space followed by 0 or more lines with leading space. Blocks are
2903 2906 effectively a high-level command with additional metadata.
2904 2907
2905 2908 Lines beginning with ``#`` are ignored.
2906 2909
2907 2910 The following sections denote available actions.
2908 2911
2909 2912 raw
2910 2913 ---
2911 2914
2912 2915 Send raw data to the server.
2913 2916
2914 2917 The block payload contains the raw data to send as one atomic send
2915 2918 operation. The data may not actually be delivered in a single system
2916 2919 call: it depends on the abilities of the transport being used.
2917 2920
2918 2921 Each line in the block is de-indented and concatenated. Then, that
2919 2922 value is evaluated as a Python b'' literal. This allows the use of
2920 2923 backslash escaping, etc.
2921 2924
2922 2925 raw+
2923 2926 ----
2924 2927
2925 2928 Behaves like ``raw`` except flushes output afterwards.
2926 2929
2927 2930 command <X>
2928 2931 -----------
2929 2932
2930 2933 Send a request to run a named command, whose name follows the ``command``
2931 2934 string.
2932 2935
2933 2936 Arguments to the command are defined as lines in this block. The format of
2934 2937 each line is ``<key> <value>``. e.g.::
2935 2938
2936 2939 command listkeys
2937 2940 namespace bookmarks
2938 2941
2939 2942 If the value begins with ``eval:``, it will be interpreted as a Python
2940 2943 literal expression. Otherwise values are interpreted as Python b'' literals.
2941 2944 This allows sending complex types and encoding special byte sequences via
2942 2945 backslash escaping.
2943 2946
2944 2947 The following arguments have special meaning:
2945 2948
2946 2949 ``PUSHFILE``
2947 2950 When defined, the *push* mechanism of the peer will be used instead
2948 2951 of the static request-response mechanism and the content of the
2949 2952 file specified in the value of this argument will be sent as the
2950 2953 command payload.
2951 2954
2952 2955 This can be used to submit a local bundle file to the remote.
2953 2956
2954 2957 batchbegin
2955 2958 ----------
2956 2959
2957 2960 Instruct the peer to begin a batched send.
2958 2961
2959 2962 All ``command`` blocks are queued for execution until the next
2960 2963 ``batchsubmit`` block.
2961 2964
2962 2965 batchsubmit
2963 2966 -----------
2964 2967
2965 2968 Submit previously queued ``command`` blocks as a batch request.
2966 2969
2967 2970 This action MUST be paired with a ``batchbegin`` action.
2968 2971
2969 2972 httprequest <method> <path>
2970 2973 ---------------------------
2971 2974
2972 2975 (HTTP peer only)
2973 2976
2974 2977 Send an HTTP request to the peer.
2975 2978
2976 2979 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2977 2980
2978 2981 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2979 2982 headers to add to the request. e.g. ``Accept: foo``.
2980 2983
2981 2984 The following arguments are special:
2982 2985
2983 2986 ``BODYFILE``
2984 2987 The content of the file defined as the value to this argument will be
2985 2988 transferred verbatim as the HTTP request body.
2986 2989
2987 2990 ``frame <type> <flags> <payload>``
2988 2991 Send a unified protocol frame as part of the request body.
2989 2992
2990 2993 All frames will be collected and sent as the body to the HTTP
2991 2994 request.
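
    A hypothetical ``httprequest`` block issuing a capabilities request (the
    path and headers shown are only an example) might look like::

      httprequest GET ?cmd=capabilities
          accept: application/mercurial-0.1
          user-agent: test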
2992 2995
2993 2996 close
2994 2997 -----
2995 2998
2996 2999 Close the connection to the server.
2997 3000
2998 3001 flush
2999 3002 -----
3000 3003
3001 3004 Flush data written to the server.
3002 3005
3003 3006 readavailable
3004 3007 -------------
3005 3008
3006 3009 Close the write end of the connection and read all available data from
3007 3010 the server.
3008 3011
3009 3012 If the connection to the server encompasses multiple pipes, we poll both
3010 3013 pipes and read available data.
3011 3014
3012 3015 readline
3013 3016 --------
3014 3017
3015 3018 Read a line of output from the server. If there are multiple output
3016 3019 pipes, reads only the main pipe.
3017 3020
3018 3021 ereadline
3019 3022 ---------
3020 3023
3021 3024 Like ``readline``, but read from the stderr pipe, if available.
3022 3025
3023 3026 read <X>
3024 3027 --------
3025 3028
3026 3029 ``read()`` N bytes from the server's main output pipe.
3027 3030
3028 3031 eread <X>
3029 3032 ---------
3030 3033
3031 3034 ``read()`` N bytes from the server's stderr pipe, if available.
3032 3035
3033 3036 Specifying Unified Frame-Based Protocol Frames
3034 3037 ----------------------------------------------
3035 3038
3036 3039 It is possible to emit *Unified Frame-Based Protocol* frames by using
3037 3040 special syntax.
3038 3041
3039 3042 A frame is composed of a type, flags, and a payload. These can be parsed
3040 3043 from a string of the form:
3041 3044
3042 3045 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3043 3046
3044 3047 ``request-id`` and ``stream-id`` are integers defining the request and
3045 3048 stream identifiers.
3046 3049
3047 3050 ``type`` can be an integer value for the frame type or the string name
3048 3051 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3049 3052 ``command-request``.
3050 3053
3051 3054 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3052 3055 components. Each component (and there can be just one) can be an integer
3053 3056 or a flag name for stream flags or frame flags, respectively. Values are
3054 3057 resolved to integers and then bitwise OR'd together.
3055 3058
3056 3059 ``payload`` represents the raw frame payload. If it begins with
3057 3060 ``cbor:``, the following string is evaluated as Python code and the
3058 3061 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3059 3062 as a Python byte string literal.
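
    For example, an illustrative frame string requesting the ``heads`` command
    on stream 1 (all values chosen purely for demonstration) could be::

      1 1 stream-begin command-request new cbor:{b'name': b'heads'}

    Within an ``httprequest`` block the same string would be prefixed with
    ``frame``.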
3060 3063 """
3061 3064 opts = pycompat.byteskwargs(opts)
3062 3065
3063 3066 if opts['localssh'] and not repo:
3064 3067 raise error.Abort(_('--localssh requires a repository'))
3065 3068
3066 3069 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3067 3070 raise error.Abort(_('invalid value for --peer'),
3068 3071 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3069 3072
3070 3073 if path and opts['localssh']:
3071 3074 raise error.Abort(_('cannot specify --localssh with an explicit '
3072 3075 'path'))
3073 3076
3074 3077 if ui.interactive():
3075 3078 ui.write(_('(waiting for commands on stdin)\n'))
3076 3079
3077 3080 blocks = list(_parsewirelangblocks(ui.fin))
3078 3081
3079 3082 proc = None
3080 3083 stdin = None
3081 3084 stdout = None
3082 3085 stderr = None
3083 3086 opener = None
3084 3087
3085 3088 if opts['localssh']:
3086 3089 # We start the SSH server in its own process so there is process
3087 3090 # separation. This prevents a whole class of potential bugs around
3088 3091 # shared state from interfering with server operation.
3089 3092 args = procutil.hgcmd() + [
3090 3093 '-R', repo.root,
3091 3094 'debugserve', '--sshstdio',
3092 3095 ]
3093 3096 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3094 3097 stdin=subprocess.PIPE,
3095 3098 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3096 3099 bufsize=0)
3097 3100
3098 3101 stdin = proc.stdin
3099 3102 stdout = proc.stdout
3100 3103 stderr = proc.stderr
3101 3104
3102 3105 # We turn the pipes into observers so we can log I/O.
3103 3106 if ui.verbose or opts['peer'] == 'raw':
3104 3107 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3105 3108 logdata=True)
3106 3109 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3107 3110 logdata=True)
3108 3111 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3109 3112 logdata=True)
3110 3113
3111 3114 # --localssh also implies the peer connection settings.
3112 3115
3113 3116 url = 'ssh://localserver'
3114 3117 autoreadstderr = not opts['noreadstderr']
3115 3118
3116 3119 if opts['peer'] == 'ssh1':
3117 3120 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3118 3121 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3119 3122 None, autoreadstderr=autoreadstderr)
3120 3123 elif opts['peer'] == 'ssh2':
3121 3124 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3122 3125 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3123 3126 None, autoreadstderr=autoreadstderr)
3124 3127 elif opts['peer'] == 'raw':
3125 3128 ui.write(_('using raw connection to peer\n'))
3126 3129 peer = None
3127 3130 else:
3128 3131 ui.write(_('creating ssh peer from handshake results\n'))
3129 3132 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3130 3133 autoreadstderr=autoreadstderr)
3131 3134
3132 3135 elif path:
3133 3136 # We bypass hg.peer() so we can proxy the sockets.
3134 3137 # TODO consider not doing this because we skip
3135 3138 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3136 3139 u = util.url(path)
3137 3140 if u.scheme != 'http':
3138 3141 raise error.Abort(_('only http:// paths are currently supported'))
3139 3142
3140 3143 url, authinfo = u.authinfo()
3141 3144 openerargs = {
3142 3145 r'useragent': b'Mercurial debugwireproto',
3143 3146 }
3144 3147
3145 3148 # Turn pipes/sockets into observers so we can log I/O.
3146 3149 if ui.verbose:
3147 3150 openerargs.update({
3148 3151 r'loggingfh': ui,
3149 3152 r'loggingname': b's',
3150 3153 r'loggingopts': {
3151 3154 r'logdata': True,
3152 3155 r'logdataapis': False,
3153 3156 },
3154 3157 })
3155 3158
3156 3159 if ui.debugflag:
3157 3160 openerargs[r'loggingopts'][r'logdataapis'] = True
3158 3161
3159 3162 # Don't send default headers when in raw mode. This allows us to
3160 3163 # bypass most of the behavior of our URL handling code so we can
3161 3164 # have near complete control over what's sent on the wire.
3162 3165 if opts['peer'] == 'raw':
3163 3166 openerargs[r'sendaccept'] = False
3164 3167
3165 3168 opener = urlmod.opener(ui, authinfo, **openerargs)
3166 3169
3167 3170 if opts['peer'] == 'http2':
3168 3171 ui.write(_('creating http peer for wire protocol version 2\n'))
3169 3172 # We go through makepeer() because we need an API descriptor for
3170 3173 # the peer instance to be useful.
3171 3174 with ui.configoverride({
3172 3175 ('experimental', 'httppeer.advertise-v2'): True}):
3173 3176 if opts['nologhandshake']:
3174 3177 ui.pushbuffer()
3175 3178
3176 3179 peer = httppeer.makepeer(ui, path, opener=opener)
3177 3180
3178 3181 if opts['nologhandshake']:
3179 3182 ui.popbuffer()
3180 3183
3181 3184 if not isinstance(peer, httppeer.httpv2peer):
3182 3185 raise error.Abort(_('could not instantiate HTTP peer for '
3183 3186 'wire protocol version 2'),
3184 3187 hint=_('the server may not have the feature '
3185 3188 'enabled or is not allowing this '
3186 3189 'client version'))
3187 3190
3188 3191 elif opts['peer'] == 'raw':
3189 3192 ui.write(_('using raw connection to peer\n'))
3190 3193 peer = None
3191 3194 elif opts['peer']:
3192 3195 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3193 3196 opts['peer'])
3194 3197 else:
3195 3198 peer = httppeer.makepeer(ui, path, opener=opener)
3196 3199
3197 3200 # We /could/ populate stdin/stdout with sock.makefile()...
3198 3201 else:
3199 3202 raise error.Abort(_('unsupported connection configuration'))
3200 3203
3201 3204 batchedcommands = None
3202 3205
3203 3206 # Now perform actions based on the parsed wire language instructions.
3204 3207 for action, lines in blocks:
3205 3208 if action in ('raw', 'raw+'):
3206 3209 if not stdin:
3207 3210 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3208 3211
3209 3212 # Concatenate the data together.
3210 3213 data = ''.join(l.lstrip() for l in lines)
3211 3214 data = stringutil.unescapestr(data)
3212 3215 stdin.write(data)
3213 3216
3214 3217 if action == 'raw+':
3215 3218 stdin.flush()
3216 3219 elif action == 'flush':
3217 3220 if not stdin:
3218 3221 raise error.Abort(_('cannot call flush on this peer'))
3219 3222 stdin.flush()
3220 3223 elif action.startswith('command'):
3221 3224 if not peer:
3222 3225 raise error.Abort(_('cannot send commands unless peer instance '
3223 3226 'is available'))
3224 3227
3225 3228 command = action.split(' ', 1)[1]
3226 3229
3227 3230 args = {}
3228 3231 for line in lines:
3229 3232 # We need to allow empty values.
3230 3233 fields = line.lstrip().split(' ', 1)
3231 3234 if len(fields) == 1:
3232 3235 key = fields[0]
3233 3236 value = ''
3234 3237 else:
3235 3238 key, value = fields
3236 3239
3237 3240 if value.startswith('eval:'):
3238 3241 value = stringutil.evalpythonliteral(value[5:])
3239 3242 else:
3240 3243 value = stringutil.unescapestr(value)
3241 3244
3242 3245 args[key] = value
3243 3246
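# Inside a batchbegin/batchsubmit pair, queue the command for later
# submission instead of sending it immediately.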
3244 3247 if batchedcommands is not None:
3245 3248 batchedcommands.append((command, args))
3246 3249 continue
3247 3250
3248 3251 ui.status(_('sending %s command\n') % command)
3249 3252
3250 3253 if 'PUSHFILE' in args:
3251 3254 with open(args['PUSHFILE'], r'rb') as fh:
3252 3255 del args['PUSHFILE']
3253 3256 res, output = peer._callpush(command, fh,
3254 3257 **pycompat.strkwargs(args))
3255 3258 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3256 3259 ui.status(_('remote output: %s\n') %
3257 3260 stringutil.escapestr(output))
3258 3261 else:
3259 3262 with peer.commandexecutor() as e:
3260 3263 res = e.callcommand(command, args).result()
3261 3264
3262 3265 if isinstance(res, wireprotov2peer.commandresponse):
3263 3266 val = res.objects()
3264 3267 ui.status(_('response: %s\n') %
3265 3268 stringutil.pprint(val, bprefix=True, indent=2))
3266 3269 else:
3267 3270 ui.status(_('response: %s\n') %
3268 3271 stringutil.pprint(res, bprefix=True, indent=2))
3269 3272
3270 3273 elif action == 'batchbegin':
3271 3274 if batchedcommands is not None:
3272 3275 raise error.Abort(_('nested batchbegin not allowed'))
3273 3276
3274 3277 batchedcommands = []
3275 3278 elif action == 'batchsubmit':
3276 3279 # There is a batching API we could go through. But it would be
3277 3280 # difficult to normalize requests into function calls. It is easier
3278 3281 # to bypass this layer and normalize to commands + args.
3279 3282 ui.status(_('sending batch with %d sub-commands\n') %
3280 3283 len(batchedcommands))
3281 3284 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3282 3285 ui.status(_('response #%d: %s\n') %
3283 3286 (i, stringutil.escapestr(chunk)))
3284 3287
3285 3288 batchedcommands = None
3286 3289
3287 3290 elif action.startswith('httprequest '):
3288 3291 if not opener:
3289 3292 raise error.Abort(_('cannot use httprequest without an HTTP '
3290 3293 'peer'))
3291 3294
3292 3295 request = action.split(' ', 2)
3293 3296 if len(request) != 3:
3294 3297 raise error.Abort(_('invalid httprequest: expected format is '
3295 3298 '"httprequest <method> <path>'))
3296 3299
3297 3300 method, httppath = request[1:]
3298 3301 headers = {}
3299 3302 body = None
3300 3303 frames = []
3301 3304 for line in lines:
3302 3305 line = line.lstrip()
3303 3306 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3304 3307 if m:
3305 3308 # Headers need to use native strings.
3306 3309 key = pycompat.strurl(m.group(1))
3307 3310 value = pycompat.strurl(m.group(2))
3308 3311 headers[key] = value
3309 3312 continue
3310 3313
3311 3314 if line.startswith(b'BODYFILE '):
3312 3315 with open(line.split(b' ', 1)[1], 'rb') as fh:
3313 3316 body = fh.read()
3314 3317 elif line.startswith(b'frame '):
3315 3318 frame = wireprotoframing.makeframefromhumanstring(
3316 3319 line[len(b'frame '):])
3317 3320
3318 3321 frames.append(frame)
3319 3322 else:
3320 3323 raise error.Abort(_('unknown argument to httprequest: %s') %
3321 3324 line)
3322 3325
3323 3326 url = path + httppath
3324 3327
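# Frames specified via ``frame`` lines take precedence over any BODYFILE
# content and become the request body.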
3325 3328 if frames:
3326 3329 body = b''.join(bytes(f) for f in frames)
3327 3330
3328 3331 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3329 3332
3330 3333 # urllib.Request insists on using has_data() as a proxy for
3331 3334 # determining the request method. Override that to use our
3332 3335 # explicitly requested method.
3333 3336 req.get_method = lambda: pycompat.sysstr(method)
3334 3337
3335 3338 try:
3336 3339 res = opener.open(req)
3337 3340 body = res.read()
3338 3341 except util.urlerr.urlerror as e:
3339 3342 # read() method must be called, but only exists in Python 2
3340 3343 getattr(e, 'read', lambda: None)()
3341 3344 continue
3342 3345
3343 3346 ct = res.headers.get(r'Content-Type')
3344 3347 if ct == r'application/mercurial-cbor':
3345 3348 ui.write(_('cbor> %s\n') %
3346 3349 stringutil.pprint(cborutil.decodeall(body),
3347 3350 bprefix=True,
3348 3351 indent=2))
3349 3352
3350 3353 elif action == 'close':
3351 3354 peer.close()
3352 3355 elif action == 'readavailable':
3353 3356 if not stdout or not stderr:
3354 3357 raise error.Abort(_('readavailable not available on this peer'))
3355 3358
3356 3359 stdin.close()
3357 3360 stdout.read()
3358 3361 stderr.read()
3359 3362
3360 3363 elif action == 'readline':
3361 3364 if not stdout:
3362 3365 raise error.Abort(_('readline not available on this peer'))
3363 3366 stdout.readline()
3364 3367 elif action == 'ereadline':
3365 3368 if not stderr:
3366 3369 raise error.Abort(_('ereadline not available on this peer'))
3367 3370 stderr.readline()
3368 3371 elif action.startswith('read '):
3369 3372 count = int(action.split(' ', 1)[1])
3370 3373 if not stdout:
3371 3374 raise error.Abort(_('read not available on this peer'))
3372 3375 stdout.read(count)
3373 3376 elif action.startswith('eread '):
3374 3377 count = int(action.split(' ', 1)[1])
3375 3378 if not stderr:
3376 3379 raise error.Abort(_('eread not available on this peer'))
3377 3380 stderr.read(count)
3378 3381 else:
3379 3382 raise error.Abort(_('unknown action: %s') % action)
3380 3383
3381 3384 if batchedcommands is not None:
3382 3385 raise error.Abort(_('unclosed "batchbegin" request'))
3383 3386
3384 3387 if peer:
3385 3388 peer.close()
3386 3389
3387 3390 if proc:
3388 3391 proc.kill()
@@ -1,635 +1,642
1 1 $ cat << EOF >> $HGRCPATH
2 2 > [ui]
3 3 > interactive=yes
4 4 > EOF
5 5
6 6 $ hg init debugrevlog
7 7 $ cd debugrevlog
8 8 $ echo a > a
9 9 $ hg ci -Am adda
10 10 adding a
11 11 $ hg rm .
12 12 removing a
13 13 $ hg ci -Am make-it-empty
14 14 $ hg revert --all -r 0
15 15 adding a
16 16 $ hg ci -Am make-it-full
17 17 #if reporevlogstore
18 18 $ hg debugrevlog -c
19 19 format : 1
20 20 flags : inline
21 21
22 22 revisions : 3
23 23 merges : 0 ( 0.00%)
24 24 normal : 3 (100.00%)
25 25 revisions : 3
26 26 empty : 0 ( 0.00%)
27 27 text : 0 (100.00%)
28 28 delta : 0 (100.00%)
29 29 snapshot : 3 (100.00%)
30 30 lvl-0 : 3 (100.00%)
31 31 deltas : 0 ( 0.00%)
32 32 revision size : 191
33 33 snapshot : 191 (100.00%)
34 34 lvl-0 : 191 (100.00%)
35 35 deltas : 0 ( 0.00%)
36 36
37 37 chunks : 3
38 38 0x75 (u) : 3 (100.00%)
39 39 chunks size : 191
40 40 0x75 (u) : 191 (100.00%)
41 41
42 42 avg chain length : 0
43 43 max chain length : 0
44 44 max chain reach : 67
45 45 compression ratio : 0
46 46
47 47 uncompressed data size (min/max/avg) : 57 / 66 / 62
48 48 full revision size (min/max/avg) : 58 / 67 / 63
49 49 inter-snapshot size (min/max/avg) : 0 / 0 / 0
50 50 delta size (min/max/avg) : 0 / 0 / 0
51 51 $ hg debugrevlog -m
52 52 format : 1
53 53 flags : inline, generaldelta
54 54
55 55 revisions : 3
56 56 merges : 0 ( 0.00%)
57 57 normal : 3 (100.00%)
58 58 revisions : 3
59 59 empty : 1 (33.33%)
60 60 text : 1 (100.00%)
61 61 delta : 0 ( 0.00%)
62 62 snapshot : 2 (66.67%)
63 63 lvl-0 : 2 (66.67%)
64 64 deltas : 0 ( 0.00%)
65 65 revision size : 88
66 66 snapshot : 88 (100.00%)
67 67 lvl-0 : 88 (100.00%)
68 68 deltas : 0 ( 0.00%)
69 69
70 70 chunks : 3
71 71 empty : 1 (33.33%)
72 72 0x75 (u) : 2 (66.67%)
73 73 chunks size : 88
74 74 empty : 0 ( 0.00%)
75 75 0x75 (u) : 88 (100.00%)
76 76
77 77 avg chain length : 0
78 78 max chain length : 0
79 79 max chain reach : 44
80 80 compression ratio : 0
81 81
82 82 uncompressed data size (min/max/avg) : 0 / 43 / 28
83 83 full revision size (min/max/avg) : 44 / 44 / 44
84 84 inter-snapshot size (min/max/avg) : 0 / 0 / 0
85 85 delta size (min/max/avg) : 0 / 0 / 0
86 86 $ hg debugrevlog a
87 87 format : 1
88 88 flags : inline, generaldelta
89 89
90 90 revisions : 1
91 91 merges : 0 ( 0.00%)
92 92 normal : 1 (100.00%)
93 93 revisions : 1
94 94 empty : 0 ( 0.00%)
95 95 text : 0 (100.00%)
96 96 delta : 0 (100.00%)
97 97 snapshot : 1 (100.00%)
98 98 lvl-0 : 1 (100.00%)
99 99 deltas : 0 ( 0.00%)
100 100 revision size : 3
101 101 snapshot : 3 (100.00%)
102 102 lvl-0 : 3 (100.00%)
103 103 deltas : 0 ( 0.00%)
104 104
105 105 chunks : 1
106 106 0x75 (u) : 1 (100.00%)
107 107 chunks size : 3
108 108 0x75 (u) : 3 (100.00%)
109 109
110 110 avg chain length : 0
111 111 max chain length : 0
112 112 max chain reach : 3
113 113 compression ratio : 0
114 114
115 115 uncompressed data size (min/max/avg) : 2 / 2 / 2
116 116 full revision size (min/max/avg) : 3 / 3 / 3
117 117 inter-snapshot size (min/max/avg) : 0 / 0 / 0
118 118 delta size (min/max/avg) : 0 / 0 / 0
119 119 #endif
120 120
121 121 Test debugindex, with and without the --verbose/--debug flag
122 122 $ hg debugrevlogindex a
123 123 rev linkrev nodeid p1 p2
124 124 0 0 b789fdd96dc2 000000000000 000000000000
125 125
126 126 #if no-reposimplestore
127 127 $ hg --verbose debugrevlogindex a
128 128 rev offset length linkrev nodeid p1 p2
129 129 0 0 3 0 b789fdd96dc2 000000000000 000000000000
130 130
131 131 $ hg --debug debugrevlogindex a
132 132 rev offset length linkrev nodeid p1 p2
133 133 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
134 134 #endif
135 135
136 136 $ hg debugrevlogindex -f 1 a
137 137 rev flag size link p1 p2 nodeid
138 138 0 0000 2 0 -1 -1 b789fdd96dc2
139 139
140 140 #if no-reposimplestore
141 141 $ hg --verbose debugrevlogindex -f 1 a
142 142 rev flag offset length size link p1 p2 nodeid
143 143 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
144 144
145 145 $ hg --debug debugrevlogindex -f 1 a
146 146 rev flag offset length size link p1 p2 nodeid
147 147 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
148 148 #endif
149 149
150 150 $ hg debugindex -c
151 151 rev linkrev nodeid p1 p2
152 152 0 0 07f494440405 000000000000 000000000000
153 153 1 1 8cccb4b5fec2 07f494440405 000000000000
154 154 2 2 b1e228c512c5 8cccb4b5fec2 000000000000
155 155 $ hg debugindex -c --debug
156 156 rev linkrev nodeid p1 p2
157 157 0 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
158 158 1 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 07f4944404050f47db2e5c5071e0e84e7a27bba9 0000000000000000000000000000000000000000
159 159 2 2 b1e228c512c5d7066d70562ed839c3323a62d6d2 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 0000000000000000000000000000000000000000
160 160 $ hg debugindex -m
161 161 rev linkrev nodeid p1 p2
162 162 0 0 a0c8bcbbb45c 000000000000 000000000000
163 163 1 1 57faf8a737ae a0c8bcbbb45c 000000000000
164 164 2 2 a35b10320954 57faf8a737ae 000000000000
165 165 $ hg debugindex -m --debug
166 166 rev linkrev nodeid p1 p2
167 167 0 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
168 168 1 1 57faf8a737ae7faf490582941a82319ba6529dca a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 0000000000000000000000000000000000000000
169 169 2 2 a35b103209548032201c16c7688cb2657f037a38 57faf8a737ae7faf490582941a82319ba6529dca 0000000000000000000000000000000000000000
170 170 $ hg debugindex a
171 171 rev linkrev nodeid p1 p2
172 172 0 0 b789fdd96dc2 000000000000 000000000000
173 173 $ hg debugindex --debug a
174 174 rev linkrev nodeid p1 p2
175 175 0 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
176 176
177 177 debugdelta chain basic output
178 178
179 #if reporevlogstore
179 #if reporevlogstore pure
180 $ hg debugindexstats
181 abort: debugindexstats only works with native code
182 [255]
183 #endif
184 #if reporevlogstore no-pure
180 185 $ hg debugindexstats
181 186 node trie capacity: 4
182 187 node trie count: 2
183 188 node trie depth: 1
184 189 node trie last rev scanned: -1
185 190 node trie lookups: 4
186 191 node trie misses: 1
187 192 node trie splits: 1
188 193 revs in memory: 3
194 #endif
189 195
196 #if reporevlogstore no-pure
190 197 $ hg debugdeltachain -m
191 198 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
192 199 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
193 200 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000
194 201 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000
195 202
196 203 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
197 204 0 1 1
198 205 1 2 1
199 206 2 3 1
200 207
201 208 $ hg debugdeltachain -m -Tjson
202 209 [
203 210 {
204 211 "chainid": 1,
205 212 "chainlen": 1,
206 213 "chainratio": 1.02325581395, (no-py3 !)
207 214 "chainratio": 1.0232558139534884, (py3 !)
208 215 "chainsize": 44,
209 216 "compsize": 44,
210 217 "deltatype": "base",
211 218 "extradist": 0,
212 219 "extraratio": 0.0,
213 220 "lindist": 44,
214 221 "prevrev": -1,
215 222 "rev": 0,
216 223 "uncompsize": 43
217 224 },
218 225 {
219 226 "chainid": 2,
220 227 "chainlen": 1,
221 228 "chainratio": 0,
222 229 "chainsize": 0,
223 230 "compsize": 0,
224 231 "deltatype": "base",
225 232 "extradist": 0,
226 233 "extraratio": 0,
227 234 "lindist": 0,
228 235 "prevrev": -1,
229 236 "rev": 1,
230 237 "uncompsize": 0
231 238 },
232 239 {
233 240 "chainid": 3,
234 241 "chainlen": 1,
235 242 "chainratio": 1.02325581395, (no-py3 !)
236 243 "chainratio": 1.0232558139534884, (py3 !)
237 244 "chainsize": 44,
238 245 "compsize": 44,
239 246 "deltatype": "base",
240 247 "extradist": 0,
241 248 "extraratio": 0.0,
242 249 "lindist": 44,
243 250 "prevrev": -1,
244 251 "rev": 2,
245 252 "uncompsize": 43
246 253 }
247 254 ]
248 255
249 256 debugdelta chain with sparse read enabled
250 257
251 258 $ cat >> $HGRCPATH <<EOF
252 259 > [experimental]
253 260 > sparse-read = True
254 261 > EOF
255 262 $ hg debugdeltachain -m
256 263 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
257 264 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
258 265 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
259 266 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
260 267
261 268 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
262 269 0 1 1 44 44 1.0
263 270 1 2 1 0 0 1
264 271 2 3 1 44 44 1.0
265 272
266 273 $ hg debugdeltachain -m -Tjson
267 274 [
268 275 {
269 276 "chainid": 1,
270 277 "chainlen": 1,
271 278 "chainratio": 1.02325581395, (no-py3 !)
272 279 "chainratio": 1.0232558139534884, (py3 !)
273 280 "chainsize": 44,
274 281 "compsize": 44,
275 282 "deltatype": "base",
276 283 "extradist": 0,
277 284 "extraratio": 0.0,
278 285 "largestblock": 44,
279 286 "lindist": 44,
280 287 "prevrev": -1,
281 288 "readdensity": 1.0,
282 289 "readsize": 44,
283 290 "rev": 0,
284 291 "srchunks": 1,
285 292 "uncompsize": 43
286 293 },
287 294 {
288 295 "chainid": 2,
289 296 "chainlen": 1,
290 297 "chainratio": 0,
291 298 "chainsize": 0,
292 299 "compsize": 0,
293 300 "deltatype": "base",
294 301 "extradist": 0,
295 302 "extraratio": 0,
296 303 "largestblock": 0,
297 304 "lindist": 0,
298 305 "prevrev": -1,
299 306 "readdensity": 1,
300 307 "readsize": 0,
301 308 "rev": 1,
302 309 "srchunks": 1,
303 310 "uncompsize": 0
304 311 },
305 312 {
306 313 "chainid": 3,
307 314 "chainlen": 1,
308 315 "chainratio": 1.02325581395, (no-py3 !)
309 316 "chainratio": 1.0232558139534884, (py3 !)
310 317 "chainsize": 44,
311 318 "compsize": 44,
312 319 "deltatype": "base",
313 320 "extradist": 0,
314 321 "extraratio": 0.0,
315 322 "largestblock": 44,
316 323 "lindist": 44,
317 324 "prevrev": -1,
318 325 "readdensity": 1.0,
319 326 "readsize": 44,
320 327 "rev": 2,
321 328 "srchunks": 1,
322 329 "uncompsize": 43
323 330 }
324 331 ]
325 332
326 333 $ printf "This test checks things.\n" >> a
327 334 $ hg ci -m a
328 335 $ hg branch other
329 336 marked working directory as branch other
330 337 (branches are permanent and global, did you want a bookmark?)
331 338 $ for i in `$TESTDIR/seq.py 5`; do
332 339 > printf "shorter ${i}" >> a
333 340 > hg ci -m "a other:$i"
334 341 > hg up -q default
335 342 > printf "for the branch default we want longer chains: ${i}" >> a
336 343 > hg ci -m "a default:$i"
337 344 > hg up -q other
338 345 > done
339 346 $ hg debugdeltachain a -T '{rev} {srchunks}\n' \
340 347 > --config experimental.sparse-read.density-threshold=0.50 \
341 348 > --config experimental.sparse-read.min-gap-size=0
342 349 0 1
343 350 1 1
344 351 2 1
345 352 3 1
346 353 4 1
347 354 5 1
348 355 6 1
349 356 7 1
350 357 8 1
351 358 9 1
352 359 10 2
353 360 11 1
354 361 $ hg --config extensions.strip= strip --no-backup -r 1
355 362 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
356 363
357 364 Test max chain len
358 365 $ cat >> $HGRCPATH << EOF
359 366 > [format]
360 367 > maxchainlen=4
361 368 > EOF
362 369
363 370 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
364 371 $ hg ci -m a
365 372 $ printf "b\n" >> a
366 373 $ hg ci -m a
367 374 $ printf "c\n" >> a
368 375 $ hg ci -m a
369 376 $ printf "d\n" >> a
370 377 $ hg ci -m a
371 378 $ printf "e\n" >> a
372 379 $ hg ci -m a
373 380 $ printf "f\n" >> a
374 381 $ hg ci -m a
375 382 $ printf 'g\n' >> a
376 383 $ hg ci -m a
377 384 $ printf 'h\n' >> a
378 385 $ hg ci -m a
379 386
380 387 $ hg debugrevlog -d a
381 388 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
382 389 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
383 390 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
384 391 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
385 392 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
386 393 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
387 394 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
388 395 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
389 396 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
390 397 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
391 398 #endif
392 399
393 400 Test debuglocks command:
394 401
395 402 $ hg debuglocks
396 403 lock: free
397 404 wlock: free
398 405
399 406 * Test setting the lock
400 407
401 408 waitlock <file> will wait for the file to be created. If it isn't created
402 409 within a reasonable amount of time, it displays an error message and returns 1
403 410 $ waitlock() {
404 411 > start=`date +%s`
405 412 > timeout=5
406 413 > while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
407 414 > now=`date +%s`
408 415 > if [ "`expr $now - $start`" -gt $timeout ]; then
409 416 > echo "timeout: $1 was not created in $timeout seconds"
410 417 > return 1
411 418 > fi
412 419 > sleep 0.1
413 420 > done
414 421 > }
415 422 $ dolock() {
416 423 > {
417 424 > waitlock .hg/unlock
418 425 > rm -f .hg/unlock
419 426 > echo y
420 427 > } | hg debuglocks "$@" > /dev/null
421 428 > }
422 429 $ dolock -s &
423 430 $ waitlock .hg/store/lock
424 431
425 432 $ hg debuglocks
426 433 lock: user *, process * (*s) (glob)
427 434 wlock: free
428 435 [1]
429 436 $ touch .hg/unlock
430 437 $ wait
431 438 $ [ -f .hg/store/lock ] || echo "There is no lock"
432 439 There is no lock
433 440
434 441 * Test setting the wlock
435 442
436 443 $ dolock -S &
437 444 $ waitlock .hg/wlock
438 445
439 446 $ hg debuglocks
440 447 lock: free
441 448 wlock: user *, process * (*s) (glob)
442 449 [1]
443 450 $ touch .hg/unlock
444 451 $ wait
445 452 $ [ -f .hg/wlock ] || echo "There is no wlock"
446 453 There is no wlock
447 454
448 455 * Test setting both locks
449 456
450 457 $ dolock -Ss &
451 458 $ waitlock .hg/wlock && waitlock .hg/store/lock
452 459
453 460 $ hg debuglocks
454 461 lock: user *, process * (*s) (glob)
455 462 wlock: user *, process * (*s) (glob)
456 463 [2]
457 464
458 465 * Test failing to set a lock
459 466
460 467 $ hg debuglocks -s
461 468 abort: lock is already held
462 469 [255]
463 470
464 471 $ hg debuglocks -S
465 472 abort: wlock is already held
466 473 [255]
467 474
468 475 $ touch .hg/unlock
469 476 $ wait
470 477
471 478 $ hg debuglocks
472 479 lock: free
473 480 wlock: free
474 481
475 482 * Test forcing the lock
476 483
477 484 $ dolock -s &
478 485 $ waitlock .hg/store/lock
479 486
480 487 $ hg debuglocks
481 488 lock: user *, process * (*s) (glob)
482 489 wlock: free
483 490 [1]
484 491
485 492 $ hg debuglocks -L
486 493
487 494 $ hg debuglocks
488 495 lock: free
489 496 wlock: free
490 497
491 498 $ touch .hg/unlock
492 499 $ wait
493 500
494 501 * Test forcing the wlock
495 502
496 503 $ dolock -S &
497 504 $ waitlock .hg/wlock
498 505
499 506 $ hg debuglocks
500 507 lock: free
501 508 wlock: user *, process * (*s) (glob)
502 509 [1]
503 510
504 511 $ hg debuglocks -W
505 512
506 513 $ hg debuglocks
507 514 lock: free
508 515 wlock: free
509 516
510 517 $ touch .hg/unlock
511 518 $ wait
512 519
513 520 Test WdirUnsupported exception
514 521
515 522 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
516 523 abort: working directory revision cannot be specified
517 524 [255]
518 525
519 526 Test cache warming command
520 527
521 528 $ rm -rf .hg/cache/
522 529 $ hg debugupdatecaches --debug
523 530 updating the branch cache
524 531 $ ls -r .hg/cache/*
525 532 .hg/cache/rbc-revs-v1
526 533 .hg/cache/rbc-names-v1
527 534 .hg/cache/manifestfulltextcache (reporevlogstore !)
528 535 .hg/cache/branch2-served
529 536
530 537 Test debugcolor
531 538
532 539 #if no-windows
533 540 $ hg debugcolor --style --color always | egrep 'mode|style|log\.'
534 541 color mode: 'ansi'
535 542 available style:
536 543 \x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
537 544 #endif
538 545
539 546 $ hg debugcolor --style --color never
540 547 color mode: None
541 548 available style:
542 549
543 550 $ cd ..
544 551
545 552 Test internal debugstacktrace command
546 553
547 554 $ cat > debugstacktrace.py << EOF
548 555 > from __future__ import absolute_import
549 556 > from mercurial import (
550 557 > pycompat,
551 558 > util,
552 559 > )
553 560 > def f():
554 561 > util.debugstacktrace(f=pycompat.stdout)
555 562 > g()
556 563 > def g():
557 564 > util.dst(b'hello from g\\n', skip=1)
558 565 > h()
559 566 > def h():
560 567 > util.dst(b'hi ...\\nfrom h hidden in g', 1, depth=2)
561 568 > f()
562 569 > EOF
563 570 $ "$PYTHON" debugstacktrace.py
564 571 stacktrace at:
565 572 debugstacktrace.py:14 in * (glob)
566 573 debugstacktrace.py:7 in f
567 574 hello from g at:
568 575 debugstacktrace.py:14 in * (glob)
569 576 debugstacktrace.py:8 in f
570 577 hi ...
571 578 from h hidden in g at:
572 579 debugstacktrace.py:8 in f
573 580 debugstacktrace.py:11 in g
574 581
575 582 Test debugcapabilities command:
576 583
577 584 $ hg debugcapabilities ./debugrevlog/
578 585 Main capabilities:
579 586 branchmap
580 587 $USUAL_BUNDLE2_CAPS$
581 588 getbundle
582 589 known
583 590 lookup
584 591 pushkey
585 592 unbundle
586 593 Bundle2 capabilities:
587 594 HG20
588 595 bookmarks
589 596 changegroup
590 597 01
591 598 02
592 599 digests
593 600 md5
594 601 sha1
595 602 sha512
596 603 error
597 604 abort
598 605 unsupportedcontent
599 606 pushraced
600 607 pushkey
601 608 hgtagsfnodes
602 609 listkeys
603 610 phases
604 611 heads
605 612 pushkey
606 613 remote-changegroup
607 614 http
608 615 https
609 616 rev-branch-cache
610 617 stream
611 618 v2
612 619
613 620 Test debugpeer
614 621
615 622 $ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" debugpeer ssh://user@dummy/debugrevlog
616 623 url: ssh://user@dummy/debugrevlog
617 624 local: no
618 625 pushable: yes
619 626
620 627 $ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" --debug debugpeer ssh://user@dummy/debugrevlog
621 628 running "*" "*/tests/dummyssh" 'user@dummy' 'hg -R debugrevlog serve --stdio' (glob) (no-windows !)
622 629 running "*" "*\tests/dummyssh" "user@dummy" "hg -R debugrevlog serve --stdio" (glob) (windows !)
623 630 devel-peer-request: hello+between
624 631 devel-peer-request: pairs: 81 bytes
625 632 sending hello command
626 633 sending between command
627 634 remote: 427
628 635 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
629 636 remote: 1
630 637 devel-peer-request: protocaps
631 638 devel-peer-request: caps: * bytes (glob)
632 639 sending protocaps command
633 640 url: ssh://user@dummy/debugrevlog
634 641 local: no
635 642 pushable: yes