i18n: omit redundant translatable synopsis text to avoid xgettext warning...
FUJIWARA Katsunori -
r38850:96b2e66d default
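The change itself is small: the `debugformat` command registration drops its empty, translatable synopsis string so xgettext no longer warns about an empty `_('')` argument. Roughly (illustrative excerpt of the decorator, trimmed to the affected lines of the diff below):

# before: an empty synopsis wrapped in _() is extracted by xgettext and triggers a warning
@command('debugformat',
         [] + cmdutil.formatteropts,
         _(''))

# after: the redundant empty synopsis is omitted entirely
@command('debugformat',
         [] + cmdutil.formatteropts)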
@@ -1,3252 +1,3251 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 filesetlang,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
92 92 release = lockmod.release
93 93
94 94 command = registrar.command()
95 95
96 96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 97 def debugancestor(ui, repo, *args):
98 98 """find the ancestor revision of two revisions in a given index"""
99 99 if len(args) == 3:
100 100 index, rev1, rev2 = args
101 101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 102 lookup = r.lookup
103 103 elif len(args) == 2:
104 104 if not repo:
105 105 raise error.Abort(_('there is no Mercurial repository here '
106 106 '(.hg not found)'))
107 107 rev1, rev2 = args
108 108 r = repo.changelog
109 109 lookup = repo.lookup
110 110 else:
111 111 raise error.Abort(_('either two or three arguments required'))
112 112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114 114
115 115 @command('debugapplystreamclonebundle', [], 'FILE')
116 116 def debugapplystreamclonebundle(ui, repo, fname):
117 117 """apply a stream clone bundle file"""
118 118 f = hg.openpath(ui, fname)
119 119 gen = exchange.readbundle(ui, f, fname)
120 120 gen.apply(repo)
121 121
122 122 @command('debugbuilddag',
123 123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 125 ('n', 'new-file', None, _('add new file at each rev'))],
126 126 _('[OPTION]... [TEXT]'))
127 127 def debugbuilddag(ui, repo, text=None,
128 128 mergeable_file=False,
129 129 overwritten_file=False,
130 130 new_file=False):
131 131 """builds a repo with a given DAG from scratch in the current empty repo
132 132
133 133 The description of the DAG is read from stdin if not given on the
134 134 command line.
135 135
136 136 Elements:
137 137
138 138 - "+n" is a linear run of n nodes based on the current default parent
139 139 - "." is a single node based on the current default parent
140 140 - "$" resets the default parent to null (implied at the start);
141 141 otherwise the default parent is always the last node created
142 142 - "<p" sets the default parent to the backref p
143 143 - "*p" is a fork at parent p, which is a backref
144 144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 145 - "/p2" is a merge of the preceding node and p2
146 146 - ":tag" defines a local tag for the preceding node
147 147 - "@branch" sets the named branch for subsequent nodes
148 148 - "#...\\n" is a comment up to the end of the line
149 149
150 150 Whitespace between the above elements is ignored.
151 151
152 152 A backref is either
153 153
154 154 - a number n, which references the node curr-n, where curr is the current
155 155 node, or
156 156 - the name of a local tag you placed earlier using ":tag", or
157 157 - empty to denote the default parent.
158 158
159 159 All string-valued elements are either strictly alphanumeric, or must
160 160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 161 """
162 162
163 163 if text is None:
164 164 ui.status(_("reading DAG from stdin\n"))
165 165 text = ui.fin.read()
166 166
167 167 cl = repo.changelog
168 168 if len(cl) > 0:
169 169 raise error.Abort(_('repository is not empty'))
170 170
171 171 # determine number of revs in DAG
172 172 total = 0
173 173 for type, data in dagparser.parsedag(text):
174 174 if type == 'n':
175 175 total += 1
176 176
177 177 if mergeable_file:
178 178 linesperrev = 2
179 179 # make a file with k lines per rev
180 180 initialmergedlines = ['%d' % i
181 181 for i in pycompat.xrange(0, total * linesperrev)]
182 182 initialmergedlines.append("")
183 183
184 184 tags = []
185 185 progress = ui.makeprogress(_('building'), unit=_('revisions'),
186 186 total=total)
187 187 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
188 188 at = -1
189 189 atbranch = 'default'
190 190 nodeids = []
191 191 id = 0
192 192 progress.update(id)
193 193 for type, data in dagparser.parsedag(text):
194 194 if type == 'n':
195 195 ui.note(('node %s\n' % pycompat.bytestr(data)))
196 196 id, ps = data
197 197
198 198 files = []
199 199 filecontent = {}
200 200
201 201 p2 = None
202 202 if mergeable_file:
203 203 fn = "mf"
204 204 p1 = repo[ps[0]]
205 205 if len(ps) > 1:
206 206 p2 = repo[ps[1]]
207 207 pa = p1.ancestor(p2)
208 208 base, local, other = [x[fn].data() for x in (pa, p1,
209 209 p2)]
210 210 m3 = simplemerge.Merge3Text(base, local, other)
211 211 ml = [l.strip() for l in m3.merge_lines()]
212 212 ml.append("")
213 213 elif at > 0:
214 214 ml = p1[fn].data().split("\n")
215 215 else:
216 216 ml = initialmergedlines
217 217 ml[id * linesperrev] += " r%i" % id
218 218 mergedtext = "\n".join(ml)
219 219 files.append(fn)
220 220 filecontent[fn] = mergedtext
221 221
222 222 if overwritten_file:
223 223 fn = "of"
224 224 files.append(fn)
225 225 filecontent[fn] = "r%i\n" % id
226 226
227 227 if new_file:
228 228 fn = "nf%i" % id
229 229 files.append(fn)
230 230 filecontent[fn] = "r%i\n" % id
231 231 if len(ps) > 1:
232 232 if not p2:
233 233 p2 = repo[ps[1]]
234 234 for fn in p2:
235 235 if fn.startswith("nf"):
236 236 files.append(fn)
237 237 filecontent[fn] = p2[fn].data()
238 238
239 239 def fctxfn(repo, cx, path):
240 240 if path in filecontent:
241 241 return context.memfilectx(repo, cx, path,
242 242 filecontent[path])
243 243 return None
244 244
245 245 if len(ps) == 0 or ps[0] < 0:
246 246 pars = [None, None]
247 247 elif len(ps) == 1:
248 248 pars = [nodeids[ps[0]], None]
249 249 else:
250 250 pars = [nodeids[p] for p in ps]
251 251 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
252 252 date=(id, 0),
253 253 user="debugbuilddag",
254 254 extra={'branch': atbranch})
255 255 nodeid = repo.commitctx(cx)
256 256 nodeids.append(nodeid)
257 257 at = id
258 258 elif type == 'l':
259 259 id, name = data
260 260 ui.note(('tag %s\n' % name))
261 261 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
262 262 elif type == 'a':
263 263 ui.note(('branch %s\n' % data))
264 264 atbranch = data
265 265 progress.update(id)
266 266
267 267 if tags:
268 268 repo.vfs.write("localtags", "".join(tags))
269 269
270 270 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
271 271 indent_string = ' ' * indent
272 272 if all:
273 273 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
274 274 % indent_string)
275 275
276 276 def showchunks(named):
277 277 ui.write("\n%s%s\n" % (indent_string, named))
278 278 for deltadata in gen.deltaiter():
279 279 node, p1, p2, cs, deltabase, delta, flags = deltadata
280 280 ui.write("%s%s %s %s %s %s %d\n" %
281 281 (indent_string, hex(node), hex(p1), hex(p2),
282 282 hex(cs), hex(deltabase), len(delta)))
283 283
284 284 chunkdata = gen.changelogheader()
285 285 showchunks("changelog")
286 286 chunkdata = gen.manifestheader()
287 287 showchunks("manifest")
288 288 for chunkdata in iter(gen.filelogheader, {}):
289 289 fname = chunkdata['filename']
290 290 showchunks(fname)
291 291 else:
292 292 if isinstance(gen, bundle2.unbundle20):
293 293 raise error.Abort(_('use debugbundle2 for this file'))
294 294 chunkdata = gen.changelogheader()
295 295 for deltadata in gen.deltaiter():
296 296 node, p1, p2, cs, deltabase, delta, flags = deltadata
297 297 ui.write("%s%s\n" % (indent_string, hex(node)))
298 298
299 299 def _debugobsmarkers(ui, part, indent=0, **opts):
300 300 """display version and markers contained in 'data'"""
301 301 opts = pycompat.byteskwargs(opts)
302 302 data = part.read()
303 303 indent_string = ' ' * indent
304 304 try:
305 305 version, markers = obsolete._readmarkers(data)
306 306 except error.UnknownVersion as exc:
307 307 msg = "%sunsupported version: %s (%d bytes)\n"
308 308 msg %= indent_string, exc.version, len(data)
309 309 ui.write(msg)
310 310 else:
311 311 msg = "%sversion: %d (%d bytes)\n"
312 312 msg %= indent_string, version, len(data)
313 313 ui.write(msg)
314 314 fm = ui.formatter('debugobsolete', opts)
315 315 for rawmarker in sorted(markers):
316 316 m = obsutil.marker(None, rawmarker)
317 317 fm.startitem()
318 318 fm.plain(indent_string)
319 319 cmdutil.showmarker(fm, m)
320 320 fm.end()
321 321
322 322 def _debugphaseheads(ui, data, indent=0):
323 323 """display version and markers contained in 'data'"""
324 324 indent_string = ' ' * indent
325 325 headsbyphase = phases.binarydecode(data)
326 326 for phase in phases.allphases:
327 327 for head in headsbyphase[phase]:
328 328 ui.write(indent_string)
329 329 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
330 330
331 331 def _quasirepr(thing):
332 332 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
333 333 return '{%s}' % (
334 334 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
335 335 return pycompat.bytestr(repr(thing))
336 336
337 337 def _debugbundle2(ui, gen, all=None, **opts):
338 338 """lists the contents of a bundle2"""
339 339 if not isinstance(gen, bundle2.unbundle20):
340 340 raise error.Abort(_('not a bundle2 file'))
341 341 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
342 342 parttypes = opts.get(r'part_type', [])
343 343 for part in gen.iterparts():
344 344 if parttypes and part.type not in parttypes:
345 345 continue
346 346 msg = '%s -- %s (mandatory: %r)\n'
347 347 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
348 348 if part.type == 'changegroup':
349 349 version = part.params.get('version', '01')
350 350 cg = changegroup.getunbundler(version, part, 'UN')
351 351 if not ui.quiet:
352 352 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
353 353 if part.type == 'obsmarkers':
354 354 if not ui.quiet:
355 355 _debugobsmarkers(ui, part, indent=4, **opts)
356 356 if part.type == 'phase-heads':
357 357 if not ui.quiet:
358 358 _debugphaseheads(ui, part, indent=4)
359 359
360 360 @command('debugbundle',
361 361 [('a', 'all', None, _('show all details')),
362 362 ('', 'part-type', [], _('show only the named part type')),
363 363 ('', 'spec', None, _('print the bundlespec of the bundle'))],
364 364 _('FILE'),
365 365 norepo=True)
366 366 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
367 367 """lists the contents of a bundle"""
368 368 with hg.openpath(ui, bundlepath) as f:
369 369 if spec:
370 370 spec = exchange.getbundlespec(ui, f)
371 371 ui.write('%s\n' % spec)
372 372 return
373 373
374 374 gen = exchange.readbundle(ui, f, bundlepath)
375 375 if isinstance(gen, bundle2.unbundle20):
376 376 return _debugbundle2(ui, gen, all=all, **opts)
377 377 _debugchangegroup(ui, gen, all=all, **opts)
378 378
379 379 @command('debugcapabilities',
380 380 [], _('PATH'),
381 381 norepo=True)
382 382 def debugcapabilities(ui, path, **opts):
383 383 """lists the capabilities of a remote peer"""
384 384 opts = pycompat.byteskwargs(opts)
385 385 peer = hg.peer(ui, opts, path)
386 386 caps = peer.capabilities()
387 387 ui.write(('Main capabilities:\n'))
388 388 for c in sorted(caps):
389 389 ui.write((' %s\n') % c)
390 390 b2caps = bundle2.bundle2caps(peer)
391 391 if b2caps:
392 392 ui.write(('Bundle2 capabilities:\n'))
393 393 for key, values in sorted(b2caps.iteritems()):
394 394 ui.write((' %s\n') % key)
395 395 for v in values:
396 396 ui.write((' %s\n') % v)
397 397
398 398 @command('debugcheckstate', [], '')
399 399 def debugcheckstate(ui, repo):
400 400 """validate the correctness of the current dirstate"""
401 401 parent1, parent2 = repo.dirstate.parents()
402 402 m1 = repo[parent1].manifest()
403 403 m2 = repo[parent2].manifest()
404 404 errors = 0
405 405 for f in repo.dirstate:
406 406 state = repo.dirstate[f]
407 407 if state in "nr" and f not in m1:
408 408 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
409 409 errors += 1
410 410 if state in "a" and f in m1:
411 411 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
412 412 errors += 1
413 413 if state in "m" and f not in m1 and f not in m2:
414 414 ui.warn(_("%s in state %s, but not in either manifest\n") %
415 415 (f, state))
416 416 errors += 1
417 417 for f in m1:
418 418 state = repo.dirstate[f]
419 419 if state not in "nrm":
420 420 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
421 421 errors += 1
422 422 if errors:
423 423 error = _(".hg/dirstate inconsistent with current parent's manifest")
424 424 raise error.Abort(error)
425 425
426 426 @command('debugcolor',
427 427 [('', 'style', None, _('show all configured styles'))],
428 428 'hg debugcolor')
429 429 def debugcolor(ui, repo, **opts):
430 430 """show available color, effects or style"""
431 431 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
432 432 if opts.get(r'style'):
433 433 return _debugdisplaystyle(ui)
434 434 else:
435 435 return _debugdisplaycolor(ui)
436 436
437 437 def _debugdisplaycolor(ui):
438 438 ui = ui.copy()
439 439 ui._styles.clear()
440 440 for effect in color._activeeffects(ui).keys():
441 441 ui._styles[effect] = effect
442 442 if ui._terminfoparams:
443 443 for k, v in ui.configitems('color'):
444 444 if k.startswith('color.'):
445 445 ui._styles[k] = k[6:]
446 446 elif k.startswith('terminfo.'):
447 447 ui._styles[k] = k[9:]
448 448 ui.write(_('available colors:\n'))
449 449 # sort labels containing '_' after the others to group the '_background' entries.
450 450 items = sorted(ui._styles.items(),
451 451 key=lambda i: ('_' in i[0], i[0], i[1]))
452 452 for colorname, label in items:
453 453 ui.write(('%s\n') % colorname, label=label)
454 454
455 455 def _debugdisplaystyle(ui):
456 456 ui.write(_('available style:\n'))
457 457 if not ui._styles:
458 458 return
459 459 width = max(len(s) for s in ui._styles)
460 460 for label, effects in sorted(ui._styles.items()):
461 461 ui.write('%s' % label, label=label)
462 462 if effects:
463 463 # 50
464 464 ui.write(': ')
465 465 ui.write(' ' * (max(0, width - len(label))))
466 466 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
467 467 ui.write('\n')
468 468
469 469 @command('debugcreatestreamclonebundle', [], 'FILE')
470 470 def debugcreatestreamclonebundle(ui, repo, fname):
471 471 """create a stream clone bundle file
472 472
473 473 Stream bundles are special bundles that are essentially archives of
474 474 revlog files. They are commonly used for cloning very quickly.
475 475 """
476 476 # TODO we may want to turn this into an abort when this functionality
477 477 # is moved into `hg bundle`.
478 478 if phases.hassecret(repo):
479 479 ui.warn(_('(warning: stream clone bundle will contain secret '
480 480 'revisions)\n'))
481 481
482 482 requirements, gen = streamclone.generatebundlev1(repo)
483 483 changegroup.writechunks(ui, gen, fname)
484 484
485 485 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486 486
487 487 @command('debugdag',
488 488 [('t', 'tags', None, _('use tags as labels')),
489 489 ('b', 'branches', None, _('annotate with branch names')),
490 490 ('', 'dots', None, _('use dots for runs')),
491 491 ('s', 'spaces', None, _('separate elements by spaces'))],
492 492 _('[OPTION]... [FILE [REV]...]'),
493 493 optionalrepo=True)
494 494 def debugdag(ui, repo, file_=None, *revs, **opts):
495 495 """format the changelog or an index DAG as a concise textual description
496 496
497 497 If you pass a revlog index, the revlog's DAG is emitted. If you list
498 498 revision numbers, they get labeled in the output as rN.
499 499
500 500 Otherwise, the changelog DAG of the current repo is emitted.
501 501 """
502 502 spaces = opts.get(r'spaces')
503 503 dots = opts.get(r'dots')
504 504 if file_:
505 505 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
506 506 file_)
507 507 revs = set((int(r) for r in revs))
508 508 def events():
509 509 for r in rlog:
510 510 yield 'n', (r, list(p for p in rlog.parentrevs(r)
511 511 if p != -1))
512 512 if r in revs:
513 513 yield 'l', (r, "r%i" % r)
514 514 elif repo:
515 515 cl = repo.changelog
516 516 tags = opts.get(r'tags')
517 517 branches = opts.get(r'branches')
518 518 if tags:
519 519 labels = {}
520 520 for l, n in repo.tags().items():
521 521 labels.setdefault(cl.rev(n), []).append(l)
522 522 def events():
523 523 b = "default"
524 524 for r in cl:
525 525 if branches:
526 526 newb = cl.read(cl.node(r))[5]['branch']
527 527 if newb != b:
528 528 yield 'a', newb
529 529 b = newb
530 530 yield 'n', (r, list(p for p in cl.parentrevs(r)
531 531 if p != -1))
532 532 if tags:
533 533 ls = labels.get(r)
534 534 if ls:
535 535 for l in ls:
536 536 yield 'l', (r, l)
537 537 else:
538 538 raise error.Abort(_('need repo for changelog dag'))
539 539
540 540 for line in dagparser.dagtextlines(events(),
541 541 addspaces=spaces,
542 542 wraplabels=True,
543 543 wrapannotations=True,
544 544 wrapnonlinear=dots,
545 545 usedots=dots,
546 546 maxlinewidth=70):
547 547 ui.write(line)
548 548 ui.write("\n")
549 549
550 550 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
551 551 def debugdata(ui, repo, file_, rev=None, **opts):
552 552 """dump the contents of a data file revision"""
553 553 opts = pycompat.byteskwargs(opts)
554 554 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
555 555 if rev is not None:
556 556 raise error.CommandError('debugdata', _('invalid arguments'))
557 557 file_, rev = None, file_
558 558 elif rev is None:
559 559 raise error.CommandError('debugdata', _('invalid arguments'))
560 560 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
561 561 try:
562 562 ui.write(r.revision(r.lookup(rev), raw=True))
563 563 except KeyError:
564 564 raise error.Abort(_('invalid revision identifier %s') % rev)
565 565
566 566 @command('debugdate',
567 567 [('e', 'extended', None, _('try extended date formats'))],
568 568 _('[-e] DATE [RANGE]'),
569 569 norepo=True, optionalrepo=True)
570 570 def debugdate(ui, date, range=None, **opts):
571 571 """parse and display a date"""
572 572 if opts[r"extended"]:
573 573 d = dateutil.parsedate(date, util.extendeddateformats)
574 574 else:
575 575 d = dateutil.parsedate(date)
576 576 ui.write(("internal: %d %d\n") % d)
577 577 ui.write(("standard: %s\n") % dateutil.datestr(d))
578 578 if range:
579 579 m = dateutil.matchdate(range)
580 580 ui.write(("match: %s\n") % m(d[0]))
581 581
582 582 @command('debugdeltachain',
583 583 cmdutil.debugrevlogopts + cmdutil.formatteropts,
584 584 _('-c|-m|FILE'),
585 585 optionalrepo=True)
586 586 def debugdeltachain(ui, repo, file_=None, **opts):
587 587 """dump information about delta chains in a revlog
588 588
589 589 Output can be templatized. Available template keywords are:
590 590
591 591 :``rev``: revision number
592 592 :``chainid``: delta chain identifier (numbered by unique base)
593 593 :``chainlen``: delta chain length to this revision
594 594 :``prevrev``: previous revision in delta chain
595 595 :``deltatype``: role of delta / how it was computed
596 596 :``compsize``: compressed size of revision
597 597 :``uncompsize``: uncompressed size of revision
598 598 :``chainsize``: total size of compressed revisions in chain
599 599 :``chainratio``: total chain size divided by uncompressed revision size
600 600 (new delta chains typically start at ratio 2.00)
601 601 :``lindist``: linear distance from base revision in delta chain to end
602 602 of this revision
603 603 :``extradist``: total size of revisions not part of this delta chain from
604 604 base of delta chain to end of this revision; a measurement
605 605 of how much extra data we need to read/seek across to read
606 606 the delta chain for this revision
607 607 :``extraratio``: extradist divided by chainsize; another representation of
608 608 how much unrelated data is needed to load this delta chain
609 609
610 610 If the repository is configured to use the sparse read, additional keywords
611 611 are available:
612 612
613 613 :``readsize``: total size of data read from the disk for a revision
614 614 (sum of the sizes of all the blocks)
615 615 :``largestblock``: size of the largest block of data read from the disk
616 616 :``readdensity``: density of useful bytes in the data read from the disk
617 617 :``srchunks``: in how many data hunks the whole revision would be read
618 618
619 619 The sparse read can be enabled with experimental.sparse-read = True
620 620 """
621 621 opts = pycompat.byteskwargs(opts)
622 622 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
623 623 index = r.index
624 624 start = r.start
625 625 length = r.length
626 626 generaldelta = r.version & revlog.FLAG_GENERALDELTA
627 627 withsparseread = getattr(r, '_withsparseread', False)
628 628
629 629 def revinfo(rev):
630 630 e = index[rev]
631 631 compsize = e[1]
632 632 uncompsize = e[2]
633 633 chainsize = 0
634 634
635 635 if generaldelta:
636 636 if e[3] == e[5]:
637 637 deltatype = 'p1'
638 638 elif e[3] == e[6]:
639 639 deltatype = 'p2'
640 640 elif e[3] == rev - 1:
641 641 deltatype = 'prev'
642 642 elif e[3] == rev:
643 643 deltatype = 'base'
644 644 else:
645 645 deltatype = 'other'
646 646 else:
647 647 if e[3] == rev:
648 648 deltatype = 'base'
649 649 else:
650 650 deltatype = 'prev'
651 651
652 652 chain = r._deltachain(rev)[0]
653 653 for iterrev in chain:
654 654 e = index[iterrev]
655 655 chainsize += e[1]
656 656
657 657 return compsize, uncompsize, deltatype, chain, chainsize
658 658
659 659 fm = ui.formatter('debugdeltachain', opts)
660 660
661 661 fm.plain(' rev chain# chainlen prev delta '
662 662 'size rawsize chainsize ratio lindist extradist '
663 663 'extraratio')
664 664 if withsparseread:
665 665 fm.plain(' readsize largestblk rddensity srchunks')
666 666 fm.plain('\n')
667 667
668 668 chainbases = {}
669 669 for rev in r:
670 670 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
671 671 chainbase = chain[0]
672 672 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
673 673 basestart = start(chainbase)
674 674 revstart = start(rev)
675 675 lineardist = revstart + comp - basestart
676 676 extradist = lineardist - chainsize
677 677 try:
678 678 prevrev = chain[-2]
679 679 except IndexError:
680 680 prevrev = -1
681 681
682 682 if uncomp != 0:
683 683 chainratio = float(chainsize) / float(uncomp)
684 684 else:
685 685 chainratio = chainsize
686 686
687 687 if chainsize != 0:
688 688 extraratio = float(extradist) / float(chainsize)
689 689 else:
690 690 extraratio = extradist
691 691
692 692 fm.startitem()
693 693 fm.write('rev chainid chainlen prevrev deltatype compsize '
694 694 'uncompsize chainsize chainratio lindist extradist '
695 695 'extraratio',
696 696 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
697 697 rev, chainid, len(chain), prevrev, deltatype, comp,
698 698 uncomp, chainsize, chainratio, lineardist, extradist,
699 699 extraratio,
700 700 rev=rev, chainid=chainid, chainlen=len(chain),
701 701 prevrev=prevrev, deltatype=deltatype, compsize=comp,
702 702 uncompsize=uncomp, chainsize=chainsize,
703 703 chainratio=chainratio, lindist=lineardist,
704 704 extradist=extradist, extraratio=extraratio)
705 705 if withsparseread:
706 706 readsize = 0
707 707 largestblock = 0
708 708 srchunks = 0
709 709
710 710 for revschunk in revlog._slicechunk(r, chain):
711 711 srchunks += 1
712 712 blkend = start(revschunk[-1]) + length(revschunk[-1])
713 713 blksize = blkend - start(revschunk[0])
714 714
715 715 readsize += blksize
716 716 if largestblock < blksize:
717 717 largestblock = blksize
718 718
719 719 if readsize:
720 720 readdensity = float(chainsize) / float(readsize)
721 721 else:
722 722 readdensity = 1
723 723
724 724 fm.write('readsize largestblock readdensity srchunks',
725 725 ' %10d %10d %9.5f %8d',
726 726 readsize, largestblock, readdensity, srchunks,
727 727 readsize=readsize, largestblock=largestblock,
728 728 readdensity=readdensity, srchunks=srchunks)
729 729
730 730 fm.plain('\n')
731 731
732 732 fm.end()
733 733
734 734 @command('debugdirstate|debugstate',
735 735 [('', 'nodates', None, _('do not display the saved mtime')),
736 736 ('', 'datesort', None, _('sort by saved mtime'))],
737 737 _('[OPTION]...'))
738 738 def debugstate(ui, repo, **opts):
739 739 """show the contents of the current dirstate"""
740 740
741 741 nodates = opts.get(r'nodates')
742 742 datesort = opts.get(r'datesort')
743 743
744 744 timestr = ""
745 745 if datesort:
746 746 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
747 747 else:
748 748 keyfunc = None # sort by filename
749 749 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
750 750 if ent[3] == -1:
751 751 timestr = 'unset '
752 752 elif nodates:
753 753 timestr = 'set '
754 754 else:
755 755 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
756 756 time.localtime(ent[3]))
757 757 timestr = encoding.strtolocal(timestr)
758 758 if ent[1] & 0o20000:
759 759 mode = 'lnk'
760 760 else:
761 761 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
762 762 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
763 763 for f in repo.dirstate.copies():
764 764 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765 765
766 766 @command('debugdiscovery',
767 767 [('', 'old', None, _('use old-style discovery')),
768 768 ('', 'nonheads', None,
769 769 _('use old-style discovery with non-heads included')),
770 770 ('', 'rev', [], 'restrict discovery to this set of revs'),
771 771 ] + cmdutil.remoteopts,
772 772 _('[--rev REV] [OTHER]'))
773 773 def debugdiscovery(ui, repo, remoteurl="default", **opts):
774 774 """runs the changeset discovery protocol in isolation"""
775 775 opts = pycompat.byteskwargs(opts)
776 776 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
777 777 remote = hg.peer(repo, opts, remoteurl)
778 778 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
779 779
780 780 # make sure tests are repeatable
781 781 random.seed(12323)
782 782
783 783 def doit(pushedrevs, remoteheads, remote=remote):
784 784 if opts.get('old'):
785 785 if not util.safehasattr(remote, 'branches'):
786 786 # enable in-client legacy support
787 787 remote = localrepo.locallegacypeer(remote.local())
788 788 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
789 789 force=True)
790 790 common = set(common)
791 791 if not opts.get('nonheads'):
792 792 ui.write(("unpruned common: %s\n") %
793 793 " ".join(sorted(short(n) for n in common)))
794 794 dag = dagutil.revlogdag(repo.changelog)
795 795 all = dag.ancestorset(dag.internalizeall(common))
796 796 common = dag.externalizeall(dag.headsetofconnecteds(all))
797 797 else:
798 798 nodes = None
799 799 if pushedrevs:
800 800 revs = scmutil.revrange(repo, pushedrevs)
801 801 nodes = [repo[r].node() for r in revs]
802 802 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
803 803 ancestorsof=nodes)
804 804 common = set(common)
805 805 rheads = set(hds)
806 806 lheads = set(repo.heads())
807 807 ui.write(("common heads: %s\n") %
808 808 " ".join(sorted(short(n) for n in common)))
809 809 if lheads <= common:
810 810 ui.write(("local is subset\n"))
811 811 elif rheads <= common:
812 812 ui.write(("remote is subset\n"))
813 813
814 814 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
815 815 localrevs = opts['rev']
816 816 doit(localrevs, remoterevs)
817 817
818 818 _chunksize = 4 << 10
819 819
820 820 @command('debugdownload',
821 821 [
822 822 ('o', 'output', '', _('path')),
823 823 ],
824 824 optionalrepo=True)
825 825 def debugdownload(ui, repo, url, output=None, **opts):
826 826 """download a resource using Mercurial logic and config
827 827 """
828 828 fh = urlmod.open(ui, url, output)
829 829
830 830 dest = ui
831 831 if output:
832 832 dest = open(output, "wb", _chunksize)
833 833 try:
834 834 data = fh.read(_chunksize)
835 835 while data:
836 836 dest.write(data)
837 837 data = fh.read(_chunksize)
838 838 finally:
839 839 if output:
840 840 dest.close()
841 841
842 842 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
843 843 def debugextensions(ui, repo, **opts):
844 844 '''show information about active extensions'''
845 845 opts = pycompat.byteskwargs(opts)
846 846 exts = extensions.extensions(ui)
847 847 hgver = util.version()
848 848 fm = ui.formatter('debugextensions', opts)
849 849 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
850 850 isinternal = extensions.ismoduleinternal(extmod)
851 851 extsource = pycompat.fsencode(extmod.__file__)
852 852 if isinternal:
853 853 exttestedwith = [] # never expose magic string to users
854 854 else:
855 855 exttestedwith = getattr(extmod, 'testedwith', '').split()
856 856 extbuglink = getattr(extmod, 'buglink', None)
857 857
858 858 fm.startitem()
859 859
860 860 if ui.quiet or ui.verbose:
861 861 fm.write('name', '%s\n', extname)
862 862 else:
863 863 fm.write('name', '%s', extname)
864 864 if isinternal or hgver in exttestedwith:
865 865 fm.plain('\n')
866 866 elif not exttestedwith:
867 867 fm.plain(_(' (untested!)\n'))
868 868 else:
869 869 lasttestedversion = exttestedwith[-1]
870 870 fm.plain(' (%s!)\n' % lasttestedversion)
871 871
872 872 fm.condwrite(ui.verbose and extsource, 'source',
873 873 _(' location: %s\n'), extsource or "")
874 874
875 875 if ui.verbose:
876 876 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
877 877 fm.data(bundled=isinternal)
878 878
879 879 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
880 880 _(' tested with: %s\n'),
881 881 fm.formatlist(exttestedwith, name='ver'))
882 882
883 883 fm.condwrite(ui.verbose and extbuglink, 'buglink',
884 884 _(' bug reporting: %s\n'), extbuglink or "")
885 885
886 886 fm.end()
887 887
888 888 @command('debugfileset',
889 889 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
890 890 ('', 'all-files', False,
891 891 _('test files from all revisions and working directory')),
892 892 ('s', 'show-matcher', None,
893 893 _('print internal representation of matcher')),
894 894 ('p', 'show-stage', [],
895 895 _('print parsed tree at the given stage'), _('NAME'))],
896 896 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
897 897 def debugfileset(ui, repo, expr, **opts):
898 898 '''parse and apply a fileset specification'''
899 899 opts = pycompat.byteskwargs(opts)
900 900 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
901 901
902 902 stages = [
903 903 ('parsed', pycompat.identity),
904 904 ]
905 905 stagenames = set(n for n, f in stages)
906 906
907 907 showalways = set()
908 908 if ui.verbose and not opts['show_stage']:
909 909 # show parsed tree by --verbose (deprecated)
910 910 showalways.add('parsed')
911 911 if opts['show_stage'] == ['all']:
912 912 showalways.update(stagenames)
913 913 else:
914 914 for n in opts['show_stage']:
915 915 if n not in stagenames:
916 916 raise error.Abort(_('invalid stage name: %s') % n)
917 917 showalways.update(opts['show_stage'])
918 918
919 919 tree = filesetlang.parse(expr)
920 920 for n, f in stages:
921 921 tree = f(tree)
922 922 if n in showalways:
923 923 if opts['show_stage'] or n != 'parsed':
924 924 ui.write(("* %s:\n") % n)
925 925 ui.write(filesetlang.prettyformat(tree), "\n")
926 926
927 927 files = set()
928 928 if opts['all_files']:
929 929 for r in repo:
930 930 c = repo[r]
931 931 files.update(c.files())
932 932 files.update(c.substate)
933 933 if opts['all_files'] or ctx.rev() is None:
934 934 wctx = repo[None]
935 935 files.update(repo.dirstate.walk(scmutil.matchall(repo),
936 936 subrepos=list(wctx.substate),
937 937 unknown=True, ignored=True))
938 938 files.update(wctx.substate)
939 939 else:
940 940 files.update(ctx.files())
941 941 files.update(ctx.substate)
942 942
943 943 m = ctx.matchfileset(expr)
944 944 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
945 945 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
946 946 for f in sorted(files):
947 947 if not m(f):
948 948 continue
949 949 ui.write("%s\n" % f)
950 950
951 951 @command('debugformat',
952 [] + cmdutil.formatteropts,
953 _(''))
952 [] + cmdutil.formatteropts)
954 953 def debugformat(ui, repo, **opts):
955 954 """display format information about the current repository
956 955
957 956 Use --verbose to get extra information about current config value and
958 957 Mercurial default."""
959 958 opts = pycompat.byteskwargs(opts)
960 959 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
961 960 maxvariantlength = max(len('format-variant'), maxvariantlength)
962 961
963 962 def makeformatname(name):
964 963 return '%s:' + (' ' * (maxvariantlength - len(name)))
965 964
966 965 fm = ui.formatter('debugformat', opts)
967 966 if fm.isplain():
968 967 def formatvalue(value):
969 968 if util.safehasattr(value, 'startswith'):
970 969 return value
971 970 if value:
972 971 return 'yes'
973 972 else:
974 973 return 'no'
975 974 else:
976 975 formatvalue = pycompat.identity
977 976
978 977 fm.plain('format-variant')
979 978 fm.plain(' ' * (maxvariantlength - len('format-variant')))
980 979 fm.plain(' repo')
981 980 if ui.verbose:
982 981 fm.plain(' config default')
983 982 fm.plain('\n')
984 983 for fv in upgrade.allformatvariant:
985 984 fm.startitem()
986 985 repovalue = fv.fromrepo(repo)
987 986 configvalue = fv.fromconfig(repo)
988 987
989 988 if repovalue != configvalue:
990 989 namelabel = 'formatvariant.name.mismatchconfig'
991 990 repolabel = 'formatvariant.repo.mismatchconfig'
992 991 elif repovalue != fv.default:
993 992 namelabel = 'formatvariant.name.mismatchdefault'
994 993 repolabel = 'formatvariant.repo.mismatchdefault'
995 994 else:
996 995 namelabel = 'formatvariant.name.uptodate'
997 996 repolabel = 'formatvariant.repo.uptodate'
998 997
999 998 fm.write('name', makeformatname(fv.name), fv.name,
1000 999 label=namelabel)
1001 1000 fm.write('repo', ' %3s', formatvalue(repovalue),
1002 1001 label=repolabel)
1003 1002 if fv.default != configvalue:
1004 1003 configlabel = 'formatvariant.config.special'
1005 1004 else:
1006 1005 configlabel = 'formatvariant.config.default'
1007 1006 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1008 1007 label=configlabel)
1009 1008 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1010 1009 label='formatvariant.default')
1011 1010 fm.plain('\n')
1012 1011 fm.end()
1013 1012
1014 1013 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1015 1014 def debugfsinfo(ui, path="."):
1016 1015 """show information detected about current filesystem"""
1017 1016 ui.write(('path: %s\n') % path)
1018 1017 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1019 1018 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1020 1019 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1021 1020 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1022 1021 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1023 1022 casesensitive = '(unknown)'
1024 1023 try:
1025 1024 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1026 1025 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1027 1026 except OSError:
1028 1027 pass
1029 1028 ui.write(('case-sensitive: %s\n') % casesensitive)
1030 1029
1031 1030 @command('debuggetbundle',
1032 1031 [('H', 'head', [], _('id of head node'), _('ID')),
1033 1032 ('C', 'common', [], _('id of common node'), _('ID')),
1034 1033 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1035 1034 _('REPO FILE [-H|-C ID]...'),
1036 1035 norepo=True)
1037 1036 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1038 1037 """retrieves a bundle from a repo
1039 1038
1040 1039 Every ID must be a full-length hex node id string. Saves the bundle to the
1041 1040 given file.
1042 1041 """
1043 1042 opts = pycompat.byteskwargs(opts)
1044 1043 repo = hg.peer(ui, opts, repopath)
1045 1044 if not repo.capable('getbundle'):
1046 1045 raise error.Abort("getbundle() not supported by target repository")
1047 1046 args = {}
1048 1047 if common:
1049 1048 args[r'common'] = [bin(s) for s in common]
1050 1049 if head:
1051 1050 args[r'heads'] = [bin(s) for s in head]
1052 1051 # TODO: get desired bundlecaps from command line.
1053 1052 args[r'bundlecaps'] = None
1054 1053 bundle = repo.getbundle('debug', **args)
1055 1054
1056 1055 bundletype = opts.get('type', 'bzip2').lower()
1057 1056 btypes = {'none': 'HG10UN',
1058 1057 'bzip2': 'HG10BZ',
1059 1058 'gzip': 'HG10GZ',
1060 1059 'bundle2': 'HG20'}
1061 1060 bundletype = btypes.get(bundletype)
1062 1061 if bundletype not in bundle2.bundletypes:
1063 1062 raise error.Abort(_('unknown bundle type specified with --type'))
1064 1063 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1065 1064
1066 1065 @command('debugignore', [], '[FILE]')
1067 1066 def debugignore(ui, repo, *files, **opts):
1068 1067 """display the combined ignore pattern and information about ignored files
1069 1068
1070 1069 With no argument display the combined ignore pattern.
1071 1070
1072 1071 Given space separated file names, shows if the given file is ignored and
1073 1072 if so, show the ignore rule (file and line number) that matched it.
1074 1073 """
1075 1074 ignore = repo.dirstate._ignore
1076 1075 if not files:
1077 1076 # Show all the patterns
1078 1077 ui.write("%s\n" % pycompat.byterepr(ignore))
1079 1078 else:
1080 1079 m = scmutil.match(repo[None], pats=files)
1081 1080 for f in m.files():
1082 1081 nf = util.normpath(f)
1083 1082 ignored = None
1084 1083 ignoredata = None
1085 1084 if nf != '.':
1086 1085 if ignore(nf):
1087 1086 ignored = nf
1088 1087 ignoredata = repo.dirstate._ignorefileandline(nf)
1089 1088 else:
1090 1089 for p in util.finddirs(nf):
1091 1090 if ignore(p):
1092 1091 ignored = p
1093 1092 ignoredata = repo.dirstate._ignorefileandline(p)
1094 1093 break
1095 1094 if ignored:
1096 1095 if ignored == nf:
1097 1096 ui.write(_("%s is ignored\n") % m.uipath(f))
1098 1097 else:
1099 1098 ui.write(_("%s is ignored because of "
1100 1099 "containing folder %s\n")
1101 1100 % (m.uipath(f), ignored))
1102 1101 ignorefile, lineno, line = ignoredata
1103 1102 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1104 1103 % (ignorefile, lineno, line))
1105 1104 else:
1106 1105 ui.write(_("%s is not ignored\n") % m.uipath(f))
1107 1106
1108 1107 @command('debugindex', cmdutil.debugrevlogopts +
1109 1108 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1110 1109 _('[-f FORMAT] -c|-m|FILE'),
1111 1110 optionalrepo=True)
1112 1111 def debugindex(ui, repo, file_=None, **opts):
1113 1112 """dump the contents of an index file"""
1114 1113 opts = pycompat.byteskwargs(opts)
1115 1114 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1116 1115 format = opts.get('format', 0)
1117 1116 if format not in (0, 1):
1118 1117 raise error.Abort(_("unknown format %d") % format)
1119 1118
1120 1119 if ui.debugflag:
1121 1120 shortfn = hex
1122 1121 else:
1123 1122 shortfn = short
1124 1123
1125 1124 # There might not be anything in r, so have a sane default
1126 1125 idlen = 12
1127 1126 for i in r:
1128 1127 idlen = len(shortfn(r.node(i)))
1129 1128 break
1130 1129
1131 1130 if format == 0:
1132 1131 if ui.verbose:
1133 1132 ui.write((" rev offset length linkrev"
1134 1133 " %s %s p2\n") % ("nodeid".ljust(idlen),
1135 1134 "p1".ljust(idlen)))
1136 1135 else:
1137 1136 ui.write((" rev linkrev %s %s p2\n") % (
1138 1137 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1139 1138 elif format == 1:
1140 1139 if ui.verbose:
1141 1140 ui.write((" rev flag offset length size link p1"
1142 1141 " p2 %s\n") % "nodeid".rjust(idlen))
1143 1142 else:
1144 1143 ui.write((" rev flag size link p1 p2 %s\n") %
1145 1144 "nodeid".rjust(idlen))
1146 1145
1147 1146 for i in r:
1148 1147 node = r.node(i)
1149 1148 if format == 0:
1150 1149 try:
1151 1150 pp = r.parents(node)
1152 1151 except Exception:
1153 1152 pp = [nullid, nullid]
1154 1153 if ui.verbose:
1155 1154 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1156 1155 i, r.start(i), r.length(i), r.linkrev(i),
1157 1156 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1158 1157 else:
1159 1158 ui.write("% 6d % 7d %s %s %s\n" % (
1160 1159 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1161 1160 shortfn(pp[1])))
1162 1161 elif format == 1:
1163 1162 pr = r.parentrevs(i)
1164 1163 if ui.verbose:
1165 1164 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1166 1165 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1167 1166 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1168 1167 else:
1169 1168 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1170 1169 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1171 1170 shortfn(node)))
1172 1171
1173 1172 @command('debugindexdot', cmdutil.debugrevlogopts,
1174 1173 _('-c|-m|FILE'), optionalrepo=True)
1175 1174 def debugindexdot(ui, repo, file_=None, **opts):
1176 1175 """dump an index DAG as a graphviz dot file"""
1177 1176 opts = pycompat.byteskwargs(opts)
1178 1177 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1179 1178 ui.write(("digraph G {\n"))
1180 1179 for i in r:
1181 1180 node = r.node(i)
1182 1181 pp = r.parents(node)
1183 1182 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1184 1183 if pp[1] != nullid:
1185 1184 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1186 1185 ui.write("}\n")
1187 1186
1188 1187 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1189 1188 def debuginstall(ui, **opts):
1190 1189 '''test Mercurial installation
1191 1190
1192 1191 Returns 0 on success.
1193 1192 '''
1194 1193 opts = pycompat.byteskwargs(opts)
1195 1194
1196 1195 def writetemp(contents):
1197 1196 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1198 1197 f = os.fdopen(fd, r"wb")
1199 1198 f.write(contents)
1200 1199 f.close()
1201 1200 return name
1202 1201
1203 1202 problems = 0
1204 1203
1205 1204 fm = ui.formatter('debuginstall', opts)
1206 1205 fm.startitem()
1207 1206
1208 1207 # encoding
1209 1208 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1210 1209 err = None
1211 1210 try:
1212 1211 codecs.lookup(pycompat.sysstr(encoding.encoding))
1213 1212 except LookupError as inst:
1214 1213 err = stringutil.forcebytestr(inst)
1215 1214 problems += 1
1216 1215 fm.condwrite(err, 'encodingerror', _(" %s\n"
1217 1216 " (check that your locale is properly set)\n"), err)
1218 1217
1219 1218 # Python
1220 1219 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1221 1220 pycompat.sysexecutable)
1222 1221 fm.write('pythonver', _("checking Python version (%s)\n"),
1223 1222 ("%d.%d.%d" % sys.version_info[:3]))
1224 1223 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1225 1224 os.path.dirname(pycompat.fsencode(os.__file__)))
1226 1225
1227 1226 security = set(sslutil.supportedprotocols)
1228 1227 if sslutil.hassni:
1229 1228 security.add('sni')
1230 1229
1231 1230 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1232 1231 fm.formatlist(sorted(security), name='protocol',
1233 1232 fmt='%s', sep=','))
1234 1233
1235 1234 # These are warnings, not errors. So don't increment problem count. This
1236 1235 # may change in the future.
1237 1236 if 'tls1.2' not in security:
1238 1237 fm.plain(_(' TLS 1.2 not supported by Python install; '
1239 1238 'network connections lack modern security\n'))
1240 1239 if 'sni' not in security:
1241 1240 fm.plain(_(' SNI not supported by Python install; may have '
1242 1241 'connectivity issues with some servers\n'))
1243 1242
1244 1243 # TODO print CA cert info
1245 1244
1246 1245 # hg version
1247 1246 hgver = util.version()
1248 1247 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1249 1248 hgver.split('+')[0])
1250 1249 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1251 1250 '+'.join(hgver.split('+')[1:]))
1252 1251
1253 1252 # compiled modules
1254 1253 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1255 1254 policy.policy)
1256 1255 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1257 1256 os.path.dirname(pycompat.fsencode(__file__)))
1258 1257
1259 1258 if policy.policy in ('c', 'allow'):
1260 1259 err = None
1261 1260 try:
1262 1261 from .cext import (
1263 1262 base85,
1264 1263 bdiff,
1265 1264 mpatch,
1266 1265 osutil,
1267 1266 )
1268 1267 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1269 1268 except Exception as inst:
1270 1269 err = stringutil.forcebytestr(inst)
1271 1270 problems += 1
1272 1271 fm.condwrite(err, 'extensionserror', " %s\n", err)
1273 1272
1274 1273 compengines = util.compengines._engines.values()
1275 1274 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1276 1275 fm.formatlist(sorted(e.name() for e in compengines),
1277 1276 name='compengine', fmt='%s', sep=', '))
1278 1277 fm.write('compenginesavail', _('checking available compression engines '
1279 1278 '(%s)\n'),
1280 1279 fm.formatlist(sorted(e.name() for e in compengines
1281 1280 if e.available()),
1282 1281 name='compengine', fmt='%s', sep=', '))
1283 1282 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1284 1283 fm.write('compenginesserver', _('checking available compression engines '
1285 1284 'for wire protocol (%s)\n'),
1286 1285 fm.formatlist([e.name() for e in wirecompengines
1287 1286 if e.wireprotosupport()],
1288 1287 name='compengine', fmt='%s', sep=', '))
1289 1288 re2 = 'missing'
1290 1289 if util._re2:
1291 1290 re2 = 'available'
1292 1291 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1293 1292 fm.data(re2=bool(util._re2))
1294 1293
1295 1294 # templates
1296 1295 p = templater.templatepaths()
1297 1296 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1298 1297 fm.condwrite(not p, '', _(" no template directories found\n"))
1299 1298 if p:
1300 1299 m = templater.templatepath("map-cmdline.default")
1301 1300 if m:
1302 1301 # template found, check if it is working
1303 1302 err = None
1304 1303 try:
1305 1304 templater.templater.frommapfile(m)
1306 1305 except Exception as inst:
1307 1306 err = stringutil.forcebytestr(inst)
1308 1307 p = None
1309 1308 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1310 1309 else:
1311 1310 p = None
1312 1311 fm.condwrite(p, 'defaulttemplate',
1313 1312 _("checking default template (%s)\n"), m)
1314 1313 fm.condwrite(not m, 'defaulttemplatenotfound',
1315 1314 _(" template '%s' not found\n"), "default")
1316 1315 if not p:
1317 1316 problems += 1
1318 1317 fm.condwrite(not p, '',
1319 1318 _(" (templates seem to have been installed incorrectly)\n"))
1320 1319
1321 1320 # editor
1322 1321 editor = ui.geteditor()
1323 1322 editor = util.expandpath(editor)
1324 1323 editorbin = procutil.shellsplit(editor)[0]
1325 1324 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1326 1325 cmdpath = procutil.findexe(editorbin)
1327 1326 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1328 1327 _(" No commit editor set and can't find %s in PATH\n"
1329 1328 " (specify a commit editor in your configuration"
1330 1329 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1331 1330 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1332 1331 _(" Can't find editor '%s' in PATH\n"
1333 1332 " (specify a commit editor in your configuration"
1334 1333 " file)\n"), not cmdpath and editorbin)
1335 1334 if not cmdpath and editor != 'vi':
1336 1335 problems += 1
1337 1336
1338 1337 # check username
1339 1338 username = None
1340 1339 err = None
1341 1340 try:
1342 1341 username = ui.username()
1343 1342 except error.Abort as e:
1344 1343 err = stringutil.forcebytestr(e)
1345 1344 problems += 1
1346 1345
1347 1346 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1348 1347 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1349 1348 " (specify a username in your configuration file)\n"), err)
1350 1349
1351 1350 fm.condwrite(not problems, '',
1352 1351 _("no problems detected\n"))
1353 1352 if not problems:
1354 1353 fm.data(problems=problems)
1355 1354 fm.condwrite(problems, 'problems',
1356 1355 _("%d problems detected,"
1357 1356 " please check your install!\n"), problems)
1358 1357 fm.end()
1359 1358
1360 1359 return problems
1361 1360
1362 1361 @command('debugknown', [], _('REPO ID...'), norepo=True)
1363 1362 def debugknown(ui, repopath, *ids, **opts):
1364 1363 """test whether node ids are known to a repo
1365 1364
1366 1365 Every ID must be a full-length hex node id string. Returns a list of 0s
1367 1366 and 1s indicating unknown/known.
1368 1367 """
1369 1368 opts = pycompat.byteskwargs(opts)
1370 1369 repo = hg.peer(ui, opts, repopath)
1371 1370 if not repo.capable('known'):
1372 1371 raise error.Abort("known() not supported by target repository")
1373 1372 flags = repo.known([bin(s) for s in ids])
1374 1373 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1375 1374
1376 1375 @command('debuglabelcomplete', [], _('LABEL...'))
1377 1376 def debuglabelcomplete(ui, repo, *args):
1378 1377 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1379 1378 debugnamecomplete(ui, repo, *args)
1380 1379
1381 1380 @command('debuglocks',
1382 1381 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1383 1382 ('W', 'force-wlock', None,
1384 1383 _('free the working state lock (DANGEROUS)')),
1385 1384 ('s', 'set-lock', None, _('set the store lock until stopped')),
1386 1385 ('S', 'set-wlock', None,
1387 1386 _('set the working state lock until stopped'))],
1388 1387 _('[OPTION]...'))
1389 1388 def debuglocks(ui, repo, **opts):
1390 1389 """show or modify state of locks
1391 1390
1392 1391 By default, this command will show which locks are held. This
1393 1392 includes the user and process holding the lock, the amount of time
1394 1393 the lock has been held, and the machine name where the process is
1395 1394 running if it's not local.
1396 1395
1397 1396 Locks protect the integrity of Mercurial's data, so should be
1398 1397 treated with care. System crashes or other interruptions may cause
1399 1398 locks to not be properly released, though Mercurial will usually
1400 1399 detect and remove such stale locks automatically.
1401 1400
1402 1401 However, detecting stale locks may not always be possible (for
1403 1402 instance, on a shared filesystem). Removing locks may also be
1404 1403 blocked by filesystem permissions.
1405 1404
1406 1405 Setting a lock will prevent other commands from changing the data.
1407 1406 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1408 1407 The set locks are removed when the command exits.
1409 1408
1410 1409 Returns 0 if no locks are held.
1411 1410
1412 1411 """
1413 1412
1414 1413 if opts.get(r'force_lock'):
1415 1414 repo.svfs.unlink('lock')
1416 1415 if opts.get(r'force_wlock'):
1417 1416 repo.vfs.unlink('wlock')
1418 1417 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1419 1418 return 0
1420 1419
1421 1420 locks = []
1422 1421 try:
1423 1422 if opts.get(r'set_wlock'):
1424 1423 try:
1425 1424 locks.append(repo.wlock(False))
1426 1425 except error.LockHeld:
1427 1426 raise error.Abort(_('wlock is already held'))
1428 1427 if opts.get(r'set_lock'):
1429 1428 try:
1430 1429 locks.append(repo.lock(False))
1431 1430 except error.LockHeld:
1432 1431 raise error.Abort(_('lock is already held'))
1433 1432 if len(locks):
1434 1433 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1435 1434 return 0
1436 1435 finally:
1437 1436 release(*locks)
1438 1437
1439 1438 now = time.time()
1440 1439 held = 0
1441 1440
1442 1441 def report(vfs, name, method):
1443 1442 # this causes stale locks to get reaped for more accurate reporting
1444 1443 try:
1445 1444 l = method(False)
1446 1445 except error.LockHeld:
1447 1446 l = None
1448 1447
1449 1448 if l:
1450 1449 l.release()
1451 1450 else:
1452 1451 try:
1453 1452 st = vfs.lstat(name)
1454 1453 age = now - st[stat.ST_MTIME]
1455 1454 user = util.username(st.st_uid)
1456 1455 locker = vfs.readlock(name)
1457 1456 if ":" in locker:
1458 1457 host, pid = locker.split(':')
1459 1458 if host == socket.gethostname():
1460 1459 locker = 'user %s, process %s' % (user, pid)
1461 1460 else:
1462 1461 locker = 'user %s, process %s, host %s' \
1463 1462 % (user, pid, host)
1464 1463 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1465 1464 return 1
1466 1465 except OSError as e:
1467 1466 if e.errno != errno.ENOENT:
1468 1467 raise
1469 1468
1470 1469 ui.write(("%-6s free\n") % (name + ":"))
1471 1470 return 0
1472 1471
1473 1472 held += report(repo.svfs, "lock", repo.lock)
1474 1473 held += report(repo.vfs, "wlock", repo.wlock)
1475 1474
1476 1475 return held
1477 1476
1478 1477 @command('debugmanifestfulltextcache', [
1479 1478 ('', 'clear', False, _('clear the cache')),
1480 1479 ('a', 'add', '', _('add the given manifest node to the cache'),
1481 1480 _('NODE'))
1482 1481 ], '')
1483 1482 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1484 1483 """show, clear or amend the contents of the manifest fulltext cache"""
1485 1484 with repo.lock():
1486 1485 r = repo.manifestlog._revlog
1487 1486 try:
1488 1487 cache = r._fulltextcache
1489 1488 except AttributeError:
1490 1489 ui.warn(_(
1491 1490 "Current revlog implementation doesn't appear to have a "
1492 1491 'manifest fulltext cache\n'))
1493 1492 return
1494 1493
1495 1494 if opts.get(r'clear'):
1496 1495 cache.clear()
1497 1496
1498 1497 if add:
1499 1498 try:
1500 1499 manifest = repo.manifestlog[r.lookup(add)]
1501 1500 except error.LookupError as e:
1502 1501 raise error.Abort(e, hint="Check your manifest node id")
1503 1502 manifest.read() # stores revision in cache too
1504 1503
1505 1504 if not len(cache):
1506 1505 ui.write(_('Cache empty'))
1507 1506 else:
1508 1507 ui.write(
1509 1508 _('Cache contains %d manifest entries, in order of most to '
1510 1509 'least recent:\n') % (len(cache),))
1511 1510 totalsize = 0
1512 1511 for nodeid in cache:
1513 1512 # Use cache.get to not update the LRU order
1514 1513 data = cache.get(nodeid)
1515 1514 size = len(data)
1516 1515 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1517 1516 ui.write(_('id: %s, size %s\n') % (
1518 1517 hex(nodeid), util.bytecount(size)))
1519 1518 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1520 1519 ui.write(
1521 1520 _('Total cache data size %s, on-disk %s\n') % (
1522 1521 util.bytecount(totalsize), util.bytecount(ondisk))
1523 1522 )
1524 1523
1525 1524 @command('debugmergestate', [], '')
1526 1525 def debugmergestate(ui, repo, *args):
1527 1526 """print merge state
1528 1527
1529 1528 Use --verbose to print out information about whether v1 or v2 merge state
1530 1529 was chosen."""
1531 1530 def _hashornull(h):
1532 1531 if h == nullhex:
1533 1532 return 'null'
1534 1533 else:
1535 1534 return h
1536 1535
1537 1536 def printrecords(version):
1538 1537 ui.write(('* version %d records\n') % version)
1539 1538 if version == 1:
1540 1539 records = v1records
1541 1540 else:
1542 1541 records = v2records
1543 1542
1544 1543 for rtype, record in records:
1545 1544 # pretty print some record types
1546 1545 if rtype == 'L':
1547 1546 ui.write(('local: %s\n') % record)
1548 1547 elif rtype == 'O':
1549 1548 ui.write(('other: %s\n') % record)
1550 1549 elif rtype == 'm':
1551 1550 driver, mdstate = record.split('\0', 1)
1552 1551 ui.write(('merge driver: %s (state "%s")\n')
1553 1552 % (driver, mdstate))
1554 1553 elif rtype in 'FDC':
1555 1554 r = record.split('\0')
1556 1555 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1557 1556 if version == 1:
1558 1557 onode = 'not stored in v1 format'
1559 1558 flags = r[7]
1560 1559 else:
1561 1560 onode, flags = r[7:9]
1562 1561 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1563 1562 % (f, rtype, state, _hashornull(hash)))
1564 1563 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1565 1564 ui.write((' ancestor path: %s (node %s)\n')
1566 1565 % (afile, _hashornull(anode)))
1567 1566 ui.write((' other path: %s (node %s)\n')
1568 1567 % (ofile, _hashornull(onode)))
1569 1568 elif rtype == 'f':
1570 1569 filename, rawextras = record.split('\0', 1)
1571 1570 extras = rawextras.split('\0')
1572 1571 i = 0
1573 1572 extrastrings = []
1574 1573 while i < len(extras):
1575 1574 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1576 1575 i += 2
1577 1576
1578 1577 ui.write(('file extras: %s (%s)\n')
1579 1578 % (filename, ', '.join(extrastrings)))
1580 1579 elif rtype == 'l':
1581 1580 labels = record.split('\0', 2)
1582 1581 labels = [l for l in labels if len(l) > 0]
1583 1582 ui.write(('labels:\n'))
1584 1583 ui.write((' local: %s\n' % labels[0]))
1585 1584 ui.write((' other: %s\n' % labels[1]))
1586 1585 if len(labels) > 2:
1587 1586 ui.write((' base: %s\n' % labels[2]))
1588 1587 else:
1589 1588 ui.write(('unrecognized entry: %s\t%s\n')
1590 1589 % (rtype, record.replace('\0', '\t')))
1591 1590
1592 1591 # Avoid mergestate.read() since it may raise an exception for unsupported
1593 1592 # merge state records. We shouldn't be doing this, but this is OK since this
1594 1593 # command is pretty low-level.
1595 1594 ms = mergemod.mergestate(repo)
1596 1595
1597 1596 # sort so that reasonable information is on top
1598 1597 v1records = ms._readrecordsv1()
1599 1598 v2records = ms._readrecordsv2()
1600 1599 order = 'LOml'
1601 1600 def key(r):
1602 1601 idx = order.find(r[0])
1603 1602 if idx == -1:
1604 1603 return (1, r[1])
1605 1604 else:
1606 1605 return (0, idx)
1607 1606 v1records.sort(key=key)
1608 1607 v2records.sort(key=key)
1609 1608
1610 1609 if not v1records and not v2records:
1611 1610 ui.write(('no merge state found\n'))
1612 1611 elif not v2records:
1613 1612 ui.note(('no version 2 merge state\n'))
1614 1613 printrecords(1)
1615 1614 elif ms._v1v2match(v1records, v2records):
1616 1615 ui.note(('v1 and v2 states match: using v2\n'))
1617 1616 printrecords(2)
1618 1617 else:
1619 1618 ui.note(('v1 and v2 states mismatch: using v1\n'))
1620 1619 printrecords(1)
1621 1620 if ui.verbose:
1622 1621 printrecords(2)
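# Record-type summary for the output above: 'L' and 'O' are the local and
# other changeset nodes, 'm' is the merge driver state, 'F'/'D'/'C' are
# per-file merge records, 'f' carries file extras and 'l' the merge labels.
# A line for an unresolved file might look like (hypothetical values):
#   file: foo.txt (record type "F", state "u", hash null)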
1623 1622
1624 1623 @command('debugnamecomplete', [], _('NAME...'))
1625 1624 def debugnamecomplete(ui, repo, *args):
1626 1625 '''complete "names" - tags, open branch names, bookmark names'''
1627 1626
1628 1627 names = set()
1629 1628 # since we previously only listed open branches, we will handle that
1630 1629 # specially (after this for loop)
1631 1630 for name, ns in repo.names.iteritems():
1632 1631 if name != 'branches':
1633 1632 names.update(ns.listnames(repo))
1634 1633 names.update(tag for (tag, heads, tip, closed)
1635 1634 in repo.branchmap().iterbranches() if not closed)
1636 1635 completions = set()
1637 1636 if not args:
1638 1637 args = ['']
1639 1638 for a in args:
1640 1639 completions.update(n for n in names if n.startswith(a))
1641 1640 ui.write('\n'.join(sorted(completions)))
1642 1641 ui.write('\n')
1643 1642
1644 1643 @command('debugobsolete',
1645 1644 [('', 'flags', 0, _('markers flag')),
1646 1645 ('', 'record-parents', False,
1647 1646 _('record parent information for the precursor')),
1648 1647 ('r', 'rev', [], _('display markers relevant to REV')),
1649 1648 ('', 'exclusive', False, _('restrict display to markers only '
1650 1649 'relevant to REV')),
1651 1650 ('', 'index', False, _('display index of the marker')),
1652 1651 ('', 'delete', [], _('delete markers specified by indices')),
1653 1652 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1654 1653 _('[OBSOLETED [REPLACEMENT ...]]'))
1655 1654 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1656 1655 """create arbitrary obsolete marker
1657 1656
1658 1657 With no arguments, displays the list of obsolescence markers."""
1659 1658
1660 1659 opts = pycompat.byteskwargs(opts)
1661 1660
1662 1661 def parsenodeid(s):
1663 1662 try:
1664 1663 # We do not use revsingle/revrange functions here to accept
1665 1664 # arbitrary node identifiers, possibly not present in the
1666 1665 # local repository.
1667 1666 n = bin(s)
1668 1667 if len(n) != len(nullid):
1669 1668 raise TypeError()
1670 1669 return n
1671 1670 except TypeError:
1672 1671 raise error.Abort('changeset references must be full hexadecimal '
1673 1672 'node identifiers')
1674 1673
1675 1674 if opts.get('delete'):
1676 1675 indices = []
1677 1676 for v in opts.get('delete'):
1678 1677 try:
1679 1678 indices.append(int(v))
1680 1679 except ValueError:
1681 1680 raise error.Abort(_('invalid index value: %r') % v,
1682 1681 hint=_('use integers for indices'))
1683 1682
1684 1683 if repo.currenttransaction():
1685 1684 raise error.Abort(_('cannot delete obsmarkers in the middle '
1686 1685 'of a transaction'))
1687 1686
1688 1687 with repo.lock():
1689 1688 n = repair.deleteobsmarkers(repo.obsstore, indices)
1690 1689 ui.write(_('deleted %i obsolescence markers\n') % n)
1691 1690
1692 1691 return
1693 1692
1694 1693 if precursor is not None:
1695 1694 if opts['rev']:
1696 1695 raise error.Abort('cannot select revision when creating marker')
1697 1696 metadata = {}
1698 1697 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1699 1698 succs = tuple(parsenodeid(succ) for succ in successors)
1700 1699 l = repo.lock()
1701 1700 try:
1702 1701 tr = repo.transaction('debugobsolete')
1703 1702 try:
1704 1703 date = opts.get('date')
1705 1704 if date:
1706 1705 date = dateutil.parsedate(date)
1707 1706 else:
1708 1707 date = None
1709 1708 prec = parsenodeid(precursor)
1710 1709 parents = None
1711 1710 if opts['record_parents']:
1712 1711 if prec not in repo.unfiltered():
1713 1712 raise error.Abort('cannot use --record-parents on '
1714 1713 'unknown changesets')
1715 1714 parents = repo.unfiltered()[prec].parents()
1716 1715 parents = tuple(p.node() for p in parents)
1717 1716 repo.obsstore.create(tr, prec, succs, opts['flags'],
1718 1717 parents=parents, date=date,
1719 1718 metadata=metadata, ui=ui)
1720 1719 tr.close()
1721 1720 except ValueError as exc:
1722 1721 raise error.Abort(_('bad obsmarker input: %s') %
1723 1722 pycompat.bytestr(exc))
1724 1723 finally:
1725 1724 tr.release()
1726 1725 finally:
1727 1726 l.release()
1728 1727 else:
1729 1728 if opts['rev']:
1730 1729 revs = scmutil.revrange(repo, opts['rev'])
1731 1730 nodes = [repo[r].node() for r in revs]
1732 1731 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1733 1732 exclusive=opts['exclusive']))
1734 1733 markers.sort(key=lambda x: x._data)
1735 1734 else:
1736 1735 markers = obsutil.getmarkers(repo)
1737 1736
1738 1737 markerstoiter = markers
1739 1738 isrelevant = lambda m: True
1740 1739 if opts.get('rev') and opts.get('index'):
1741 1740 markerstoiter = obsutil.getmarkers(repo)
1742 1741 markerset = set(markers)
1743 1742 isrelevant = lambda m: m in markerset
1744 1743
1745 1744 fm = ui.formatter('debugobsolete', opts)
1746 1745 for i, m in enumerate(markerstoiter):
1747 1746 if not isrelevant(m):
1748 1747 # marker can be irrelevant when we're iterating over a set
1749 1748 # of markers (markerstoiter) which is bigger than the set
1750 1749 # of markers we want to display (markers)
1751 1750 # this can happen if both --index and --rev options are
1752 1751 # provided and thus we need to iterate over all of the markers
1753 1752 # to get the correct indices, but only display the ones that
1754 1753 # are relevant to --rev value
1755 1754 continue
1756 1755 fm.startitem()
1757 1756 ind = i if opts.get('index') else None
1758 1757 cmdutil.showmarker(fm, m, index=ind)
1759 1758 fm.end()
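# Usage sketch (node ids are placeholders): with two arguments the command
# records a marker stating that the first changeset was rewritten into the
# second; node ids must be full 40-digit hexadecimal identifiers.
#   $ hg debugobsolete <precursor-node> <successor-node>   # create a marker
#   $ hg debugobsolete --index                              # list markers with indices
#   $ hg debugobsolete --delete 0                           # delete the marker at index 0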
1760 1759
1761 1760 @command('debugpathcomplete',
1762 1761 [('f', 'full', None, _('complete an entire path')),
1763 1762 ('n', 'normal', None, _('show only normal files')),
1764 1763 ('a', 'added', None, _('show only added files')),
1765 1764 ('r', 'removed', None, _('show only removed files'))],
1766 1765 _('FILESPEC...'))
1767 1766 def debugpathcomplete(ui, repo, *specs, **opts):
1768 1767 '''complete part or all of a tracked path
1769 1768
1770 1769 This command supports shells that offer path name completion. It
1771 1770 currently completes only files already known to the dirstate.
1772 1771
1773 1772 Completion extends only to the next path segment unless
1774 1773 --full is specified, in which case entire paths are used.'''
1775 1774
1776 1775 def complete(path, acceptable):
1777 1776 dirstate = repo.dirstate
1778 1777 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1779 1778 rootdir = repo.root + pycompat.ossep
1780 1779 if spec != repo.root and not spec.startswith(rootdir):
1781 1780 return [], []
1782 1781 if os.path.isdir(spec):
1783 1782 spec += '/'
1784 1783 spec = spec[len(rootdir):]
1785 1784 fixpaths = pycompat.ossep != '/'
1786 1785 if fixpaths:
1787 1786 spec = spec.replace(pycompat.ossep, '/')
1788 1787 speclen = len(spec)
1789 1788 fullpaths = opts[r'full']
1790 1789 files, dirs = set(), set()
1791 1790 adddir, addfile = dirs.add, files.add
1792 1791 for f, st in dirstate.iteritems():
1793 1792 if f.startswith(spec) and st[0] in acceptable:
1794 1793 if fixpaths:
1795 1794 f = f.replace('/', pycompat.ossep)
1796 1795 if fullpaths:
1797 1796 addfile(f)
1798 1797 continue
1799 1798 s = f.find(pycompat.ossep, speclen)
1800 1799 if s >= 0:
1801 1800 adddir(f[:s])
1802 1801 else:
1803 1802 addfile(f)
1804 1803 return files, dirs
1805 1804
1806 1805 acceptable = ''
1807 1806 if opts[r'normal']:
1808 1807 acceptable += 'nm'
1809 1808 if opts[r'added']:
1810 1809 acceptable += 'a'
1811 1810 if opts[r'removed']:
1812 1811 acceptable += 'r'
1813 1812 cwd = repo.getcwd()
1814 1813 if not specs:
1815 1814 specs = ['.']
1816 1815
1817 1816 files, dirs = set(), set()
1818 1817 for spec in specs:
1819 1818 f, d = complete(spec, acceptable or 'nmar')
1820 1819 files.update(f)
1821 1820 dirs.update(d)
1822 1821 files.update(dirs)
1823 1822 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1824 1823 ui.write('\n')
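# Completion behaviour sketch (hypothetical dirstate): with tracked files
# 'src/main.py', 'src/util.py' and 'setup.py', completing the spec 's'
# without --full yields the directory 'src' plus the file 'setup.py', since
# complete() stops at the next path separator; with --full it yields all
# three full paths instead.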
1825 1824
1826 1825 @command('debugpeer', [], _('PATH'), norepo=True)
1827 1826 def debugpeer(ui, path):
1828 1827 """establish a connection to a peer repository"""
1829 1828 # Always enable peer request logging. Requires --debug to display
1830 1829 # though.
1831 1830 overrides = {
1832 1831 ('devel', 'debug.peer-request'): True,
1833 1832 }
1834 1833
1835 1834 with ui.configoverride(overrides):
1836 1835 peer = hg.peer(ui, {}, path)
1837 1836
1838 1837 local = peer.local() is not None
1839 1838 canpush = peer.canpush()
1840 1839
1841 1840 ui.write(_('url: %s\n') % peer.url())
1842 1841 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1843 1842 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
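# Example output sketch (hypothetical URL):
#   url: ssh://example.com/repo
#   local: no
#   pushable: yes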
1844 1843
1845 1844 @command('debugpickmergetool',
1846 1845 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1847 1846 ('', 'changedelete', None, _('emulate merging change and delete')),
1848 1847 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1849 1848 _('[PATTERN]...'),
1850 1849 inferrepo=True)
1851 1850 def debugpickmergetool(ui, repo, *pats, **opts):
1852 1851 """examine which merge tool is chosen for specified file
1853 1852
1854 1853 As described in :hg:`help merge-tools`, Mercurial examines the
1855 1854 configurations below in this order to decide which merge tool is
1856 1855 chosen for the specified file.
1857 1856
1858 1857 1. ``--tool`` option
1859 1858 2. ``HGMERGE`` environment variable
1860 1859 3. configurations in ``merge-patterns`` section
1861 1860 4. configuration of ``ui.merge``
1862 1861 5. configurations in ``merge-tools`` section
1863 1862 6. ``hgmerge`` tool (for historical reasons only)
1864 1863 7. default tool for fallback (``:merge`` or ``:prompt``)
1865 1864
1866 1865 This command writes out the examination result in the style below::
1867 1866
1868 1867 FILE = MERGETOOL
1869 1868
1870 1869 By default, all files known in the first parent context of the
1871 1870 working directory are examined. Use file patterns and/or -I/-X
1872 1871 options to limit target files. -r/--rev is also useful to examine
1873 1872 files in another context without actually updating to it.
1874 1873
1875 1874 With --debug, this command also shows warning messages while
1876 1875 matching against ``merge-patterns`` and so on. It is recommended
1877 1876 to use this option with explicit file patterns and/or -I/-X
1878 1877 options, because this option increases the amount of output per
1879 1878 file according to the configurations in hgrc.
1880 1879
1881 1880 With -v/--verbose, this command first shows the configurations
1882 1881 below (only if specified).
1883 1882
1884 1883 - ``--tool`` option
1885 1884 - ``HGMERGE`` environment variable
1886 1885 - configuration of ``ui.merge``
1887 1886
1888 1887 If a merge tool is chosen before matching against
1889 1888 ``merge-patterns``, this command can't show any helpful
1890 1889 information, even with --debug. In such a case, the information
1891 1890 above is useful for understanding why a merge tool was chosen.
1892 1891 """
1893 1892 opts = pycompat.byteskwargs(opts)
1894 1893 overrides = {}
1895 1894 if opts['tool']:
1896 1895 overrides[('ui', 'forcemerge')] = opts['tool']
1897 1896 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1898 1897
1899 1898 with ui.configoverride(overrides, 'debugmergepatterns'):
1900 1899 hgmerge = encoding.environ.get("HGMERGE")
1901 1900 if hgmerge is not None:
1902 1901 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1903 1902 uimerge = ui.config("ui", "merge")
1904 1903 if uimerge:
1905 1904 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1906 1905
1907 1906 ctx = scmutil.revsingle(repo, opts.get('rev'))
1908 1907 m = scmutil.match(ctx, pats, opts)
1909 1908 changedelete = opts['changedelete']
1910 1909 for path in ctx.walk(m):
1911 1910 fctx = ctx[path]
1912 1911 try:
1913 1912 if not ui.debugflag:
1914 1913 ui.pushbuffer(error=True)
1915 1914 tool, toolpath = filemerge._picktool(repo, ui, path,
1916 1915 fctx.isbinary(),
1917 1916 'l' in fctx.flags(),
1918 1917 changedelete)
1919 1918 finally:
1920 1919 if not ui.debugflag:
1921 1920 ui.popbuffer()
1922 1921 ui.write(('%s = %s\n') % (path, tool))
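# Example invocation sketch (hypothetical file names): forcing a tool via
# --tool short-circuits the selection order described in the docstring.
#   $ hg debugpickmergetool --tool :merge3
#   foo.c = :merge3
#   bar.txt = :merge3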
1923 1922
1924 1923 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1925 1924 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1926 1925 '''access the pushkey key/value protocol
1927 1926
1928 1927 With two args, list the keys in the given namespace.
1929 1928
1930 1929 With five args, set a key to new if it currently is set to old.
1931 1930 Reports success or failure.
1932 1931 '''
1933 1932
1934 1933 target = hg.peer(ui, {}, repopath)
1935 1934 if keyinfo:
1936 1935 key, old, new = keyinfo
1937 1936 with target.commandexecutor() as e:
1938 1937 r = e.callcommand('pushkey', {
1939 1938 'namespace': namespace,
1940 1939 'key': key,
1941 1940 'old': old,
1942 1941 'new': new,
1943 1942 }).result()
1944 1943
1945 1944 ui.status(pycompat.bytestr(r) + '\n')
1946 1945 return not r
1947 1946 else:
1948 1947 for k, v in sorted(target.listkeys(namespace).iteritems()):
1949 1948 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1950 1949 stringutil.escapestr(v)))
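# Usage sketch (repository path and values are hypothetical); 'bookmarks'
# and 'phases' are typical pushkey namespaces:
#   $ hg debugpushkey http://example.com/repo bookmarks
#   $ hg debugpushkey http://example.com/repo bookmarks mybook '' <new-node>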
1951 1950
1952 1951 @command('debugpvec', [], _('A B'))
1953 1952 def debugpvec(ui, repo, a, b=None):
1954 1953 ca = scmutil.revsingle(repo, a)
1955 1954 cb = scmutil.revsingle(repo, b)
1956 1955 pa = pvec.ctxpvec(ca)
1957 1956 pb = pvec.ctxpvec(cb)
1958 1957 if pa == pb:
1959 1958 rel = "="
1960 1959 elif pa > pb:
1961 1960 rel = ">"
1962 1961 elif pa < pb:
1963 1962 rel = "<"
1964 1963 elif pa | pb:
1965 1964 rel = "|"
1966 1965 ui.write(_("a: %s\n") % pa)
1967 1966 ui.write(_("b: %s\n") % pb)
1968 1967 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1969 1968 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1970 1969 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1971 1970 pa.distance(pb), rel))
1972 1971
1973 1972 @command('debugrebuilddirstate|debugrebuildstate',
1974 1973 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1975 1974 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1976 1975 'the working copy parent')),
1977 1976 ],
1978 1977 _('[-r REV]'))
1979 1978 def debugrebuilddirstate(ui, repo, rev, **opts):
1980 1979 """rebuild the dirstate as it would look like for the given revision
1981 1980
1982 1981 If no revision is specified, the first current parent will be used.
1983 1982
1984 1983 The dirstate will be set to the files of the given revision.
1985 1984 The actual working directory content or existing dirstate
1986 1985 information such as adds or removes is not considered.
1987 1986
1988 1987 ``minimal`` will only rebuild the dirstate status for files that claim to be
1989 1988 tracked but are not in the parent manifest, or that exist in the parent
1990 1989 manifest but are not in the dirstate. It will not change adds, removes, or
1991 1990 modified files that are in the working copy parent.
1992 1991
1993 1992 One use of this command is to make the next :hg:`status` invocation
1994 1993 check the actual file content.
1995 1994 """
1996 1995 ctx = scmutil.revsingle(repo, rev)
1997 1996 with repo.wlock():
1998 1997 dirstate = repo.dirstate
1999 1998 changedfiles = None
2000 1999 # See command doc for what minimal does.
2001 2000 if opts.get(r'minimal'):
2002 2001 manifestfiles = set(ctx.manifest().keys())
2003 2002 dirstatefiles = set(dirstate)
2004 2003 manifestonly = manifestfiles - dirstatefiles
2005 2004 dsonly = dirstatefiles - manifestfiles
2006 2005 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2007 2006 changedfiles = manifestonly | dsnotadded
2008 2007
2009 2008 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2010 2009
2011 2010 @command('debugrebuildfncache', [], '')
2012 2011 def debugrebuildfncache(ui, repo):
2013 2012 """rebuild the fncache file"""
2014 2013 repair.rebuildfncache(ui, repo)
2015 2014
2016 2015 @command('debugrename',
2017 2016 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2018 2017 _('[-r REV] FILE'))
2019 2018 def debugrename(ui, repo, file1, *pats, **opts):
2020 2019 """dump rename information"""
2021 2020
2022 2021 opts = pycompat.byteskwargs(opts)
2023 2022 ctx = scmutil.revsingle(repo, opts.get('rev'))
2024 2023 m = scmutil.match(ctx, (file1,) + pats, opts)
2025 2024 for abs in ctx.walk(m):
2026 2025 fctx = ctx[abs]
2027 2026 o = fctx.filelog().renamed(fctx.filenode())
2028 2027 rel = m.rel(abs)
2029 2028 if o:
2030 2029 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2031 2030 else:
2032 2031 ui.write(_("%s not renamed\n") % rel)
2033 2032
2034 2033 @command('debugrevlog', cmdutil.debugrevlogopts +
2035 2034 [('d', 'dump', False, _('dump index data'))],
2036 2035 _('-c|-m|FILE'),
2037 2036 optionalrepo=True)
2038 2037 def debugrevlog(ui, repo, file_=None, **opts):
2039 2038 """show data and statistics about a revlog"""
2040 2039 opts = pycompat.byteskwargs(opts)
2041 2040 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2042 2041
2043 2042 if opts.get("dump"):
2044 2043 numrevs = len(r)
2045 2044 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2046 2045 " rawsize totalsize compression heads chainlen\n"))
2047 2046 ts = 0
2048 2047 heads = set()
2049 2048
2050 2049 for rev in pycompat.xrange(numrevs):
2051 2050 dbase = r.deltaparent(rev)
2052 2051 if dbase == -1:
2053 2052 dbase = rev
2054 2053 cbase = r.chainbase(rev)
2055 2054 clen = r.chainlen(rev)
2056 2055 p1, p2 = r.parentrevs(rev)
2057 2056 rs = r.rawsize(rev)
2058 2057 ts = ts + rs
2059 2058 heads -= set(r.parentrevs(rev))
2060 2059 heads.add(rev)
2061 2060 try:
2062 2061 compression = ts / r.end(rev)
2063 2062 except ZeroDivisionError:
2064 2063 compression = 0
2065 2064 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2066 2065 "%11d %5d %8d\n" %
2067 2066 (rev, p1, p2, r.start(rev), r.end(rev),
2068 2067 r.start(dbase), r.start(cbase),
2069 2068 r.start(p1), r.start(p2),
2070 2069 rs, ts, compression, len(heads), clen))
2071 2070 return 0
2072 2071
2073 2072 v = r.version
2074 2073 format = v & 0xFFFF
2075 2074 flags = []
2076 2075 gdelta = False
2077 2076 if v & revlog.FLAG_INLINE_DATA:
2078 2077 flags.append('inline')
2079 2078 if v & revlog.FLAG_GENERALDELTA:
2080 2079 gdelta = True
2081 2080 flags.append('generaldelta')
2082 2081 if not flags:
2083 2082 flags = ['(none)']
2084 2083
2085 2084 nummerges = 0
2086 2085 numfull = 0
2087 2086 numprev = 0
2088 2087 nump1 = 0
2089 2088 nump2 = 0
2090 2089 numother = 0
2091 2090 nump1prev = 0
2092 2091 nump2prev = 0
2093 2092 chainlengths = []
2094 2093 chainbases = []
2095 2094 chainspans = []
2096 2095
2097 2096 datasize = [None, 0, 0]
2098 2097 fullsize = [None, 0, 0]
2099 2098 deltasize = [None, 0, 0]
2100 2099 chunktypecounts = {}
2101 2100 chunktypesizes = {}
2102 2101
2103 2102 def addsize(size, l):
2104 2103 if l[0] is None or size < l[0]:
2105 2104 l[0] = size
2106 2105 if size > l[1]:
2107 2106 l[1] = size
2108 2107 l[2] += size
2109 2108
2110 2109 numrevs = len(r)
2111 2110 for rev in pycompat.xrange(numrevs):
2112 2111 p1, p2 = r.parentrevs(rev)
2113 2112 delta = r.deltaparent(rev)
2114 2113 if format > 0:
2115 2114 addsize(r.rawsize(rev), datasize)
2116 2115 if p2 != nullrev:
2117 2116 nummerges += 1
2118 2117 size = r.length(rev)
2119 2118 if delta == nullrev:
2120 2119 chainlengths.append(0)
2121 2120 chainbases.append(r.start(rev))
2122 2121 chainspans.append(size)
2123 2122 numfull += 1
2124 2123 addsize(size, fullsize)
2125 2124 else:
2126 2125 chainlengths.append(chainlengths[delta] + 1)
2127 2126 baseaddr = chainbases[delta]
2128 2127 revaddr = r.start(rev)
2129 2128 chainbases.append(baseaddr)
2130 2129 chainspans.append((revaddr - baseaddr) + size)
2131 2130 addsize(size, deltasize)
2132 2131 if delta == rev - 1:
2133 2132 numprev += 1
2134 2133 if delta == p1:
2135 2134 nump1prev += 1
2136 2135 elif delta == p2:
2137 2136 nump2prev += 1
2138 2137 elif delta == p1:
2139 2138 nump1 += 1
2140 2139 elif delta == p2:
2141 2140 nump2 += 1
2142 2141 elif delta != nullrev:
2143 2142 numother += 1
2144 2143
2145 2144 # Obtain data on the raw chunks in the revlog.
2146 2145 segment = r._getsegmentforrevs(rev, rev)[1]
2147 2146 if segment:
2148 2147 chunktype = bytes(segment[0:1])
2149 2148 else:
2150 2149 chunktype = 'empty'
2151 2150
2152 2151 if chunktype not in chunktypecounts:
2153 2152 chunktypecounts[chunktype] = 0
2154 2153 chunktypesizes[chunktype] = 0
2155 2154
2156 2155 chunktypecounts[chunktype] += 1
2157 2156 chunktypesizes[chunktype] += size
2158 2157
2159 2158 # Adjust size min value for empty cases
2160 2159 for size in (datasize, fullsize, deltasize):
2161 2160 if size[0] is None:
2162 2161 size[0] = 0
2163 2162
2164 2163 numdeltas = numrevs - numfull
2165 2164 numoprev = numprev - nump1prev - nump2prev
2166 2165 totalrawsize = datasize[2]
2167 2166 datasize[2] /= numrevs
2168 2167 fulltotal = fullsize[2]
2169 2168 fullsize[2] /= numfull
2170 2169 deltatotal = deltasize[2]
2171 2170 if numrevs - numfull > 0:
2172 2171 deltasize[2] /= numrevs - numfull
2173 2172 totalsize = fulltotal + deltatotal
2174 2173 avgchainlen = sum(chainlengths) / numrevs
2175 2174 maxchainlen = max(chainlengths)
2176 2175 maxchainspan = max(chainspans)
2177 2176 compratio = 1
2178 2177 if totalsize:
2179 2178 compratio = totalrawsize / totalsize
2180 2179
2181 2180 basedfmtstr = '%%%dd\n'
2182 2181 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2183 2182
2184 2183 def dfmtstr(max):
2185 2184 return basedfmtstr % len(str(max))
2186 2185 def pcfmtstr(max, padding=0):
2187 2186 return basepcfmtstr % (len(str(max)), ' ' * padding)
2188 2187
2189 2188 def pcfmt(value, total):
2190 2189 if total:
2191 2190 return (value, 100 * float(value) / total)
2192 2191 else:
2193 2192 return value, 100.0
2194 2193
2195 2194 ui.write(('format : %d\n') % format)
2196 2195 ui.write(('flags : %s\n') % ', '.join(flags))
2197 2196
2198 2197 ui.write('\n')
2199 2198 fmt = pcfmtstr(totalsize)
2200 2199 fmt2 = dfmtstr(totalsize)
2201 2200 ui.write(('revisions : ') + fmt2 % numrevs)
2202 2201 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2203 2202 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2204 2203 ui.write(('revisions : ') + fmt2 % numrevs)
2205 2204 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2206 2205 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2207 2206 ui.write(('revision size : ') + fmt2 % totalsize)
2208 2207 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2209 2208 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2210 2209
2211 2210 def fmtchunktype(chunktype):
2212 2211 if chunktype == 'empty':
2213 2212 return ' %s : ' % chunktype
2214 2213 elif chunktype in pycompat.bytestr(string.ascii_letters):
2215 2214 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2216 2215 else:
2217 2216 return ' 0x%s : ' % hex(chunktype)
2218 2217
2219 2218 ui.write('\n')
2220 2219 ui.write(('chunks : ') + fmt2 % numrevs)
2221 2220 for chunktype in sorted(chunktypecounts):
2222 2221 ui.write(fmtchunktype(chunktype))
2223 2222 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2224 2223 ui.write(('chunks size : ') + fmt2 % totalsize)
2225 2224 for chunktype in sorted(chunktypecounts):
2226 2225 ui.write(fmtchunktype(chunktype))
2227 2226 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2228 2227
2229 2228 ui.write('\n')
2230 2229 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2231 2230 ui.write(('avg chain length : ') + fmt % avgchainlen)
2232 2231 ui.write(('max chain length : ') + fmt % maxchainlen)
2233 2232 ui.write(('max chain reach : ') + fmt % maxchainspan)
2234 2233 ui.write(('compression ratio : ') + fmt % compratio)
2235 2234
2236 2235 if format > 0:
2237 2236 ui.write('\n')
2238 2237 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2239 2238 % tuple(datasize))
2240 2239 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2241 2240 % tuple(fullsize))
2242 2241 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2243 2242 % tuple(deltasize))
2244 2243
2245 2244 if numdeltas > 0:
2246 2245 ui.write('\n')
2247 2246 fmt = pcfmtstr(numdeltas)
2248 2247 fmt2 = pcfmtstr(numdeltas, 4)
2249 2248 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2250 2249 if numprev > 0:
2251 2250 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2252 2251 numprev))
2253 2252 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2254 2253 numprev))
2255 2254 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2256 2255 numprev))
2257 2256 if gdelta:
2258 2257 ui.write(('deltas against p1 : ')
2259 2258 + fmt % pcfmt(nump1, numdeltas))
2260 2259 ui.write(('deltas against p2 : ')
2261 2260 + fmt % pcfmt(nump2, numdeltas))
2262 2261 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2263 2262 numdeltas))
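# Worked example for the ratio reported above (hypothetical numbers):
# ``compression ratio`` is totalrawsize / totalsize, i.e. the sum of the
# uncompressed revision sizes divided by the bytes actually stored as full
# snapshots plus deltas; 10 MB of raw data kept in 2.5 MB of revlog data is
# reported as a ratio of 4.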
2264 2263
2265 2264 @command('debugrevspec',
2266 2265 [('', 'optimize', None,
2267 2266 _('print parsed tree after optimizing (DEPRECATED)')),
2268 2267 ('', 'show-revs', True, _('print list of result revisions (default)')),
2269 2268 ('s', 'show-set', None, _('print internal representation of result set')),
2270 2269 ('p', 'show-stage', [],
2271 2270 _('print parsed tree at the given stage'), _('NAME')),
2272 2271 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2273 2272 ('', 'verify-optimized', False, _('verify optimized result')),
2274 2273 ],
2275 2274 ('REVSPEC'))
2276 2275 def debugrevspec(ui, repo, expr, **opts):
2277 2276 """parse and apply a revision specification
2278 2277
2279 2278 Use the -p/--show-stage option to print the parsed tree at the given stages.
2280 2279 Use -p all to print the tree at every stage.
2281 2280
2282 2281 Use the --no-show-revs option with -s or -p to print only the set
2283 2282 representation or the parsed tree, respectively.
2284 2283
2285 2284 Use --verify-optimized to compare the optimized result with the unoptimized
2286 2285 one. Returns 1 if the optimized result differs.
2287 2286 """
2288 2287 opts = pycompat.byteskwargs(opts)
2289 2288 aliases = ui.configitems('revsetalias')
2290 2289 stages = [
2291 2290 ('parsed', lambda tree: tree),
2292 2291 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2293 2292 ui.warn)),
2294 2293 ('concatenated', revsetlang.foldconcat),
2295 2294 ('analyzed', revsetlang.analyze),
2296 2295 ('optimized', revsetlang.optimize),
2297 2296 ]
2298 2297 if opts['no_optimized']:
2299 2298 stages = stages[:-1]
2300 2299 if opts['verify_optimized'] and opts['no_optimized']:
2301 2300 raise error.Abort(_('cannot use --verify-optimized with '
2302 2301 '--no-optimized'))
2303 2302 stagenames = set(n for n, f in stages)
2304 2303
2305 2304 showalways = set()
2306 2305 showchanged = set()
2307 2306 if ui.verbose and not opts['show_stage']:
2308 2307 # show parsed tree by --verbose (deprecated)
2309 2308 showalways.add('parsed')
2310 2309 showchanged.update(['expanded', 'concatenated'])
2311 2310 if opts['optimize']:
2312 2311 showalways.add('optimized')
2313 2312 if opts['show_stage'] and opts['optimize']:
2314 2313 raise error.Abort(_('cannot use --optimize with --show-stage'))
2315 2314 if opts['show_stage'] == ['all']:
2316 2315 showalways.update(stagenames)
2317 2316 else:
2318 2317 for n in opts['show_stage']:
2319 2318 if n not in stagenames:
2320 2319 raise error.Abort(_('invalid stage name: %s') % n)
2321 2320 showalways.update(opts['show_stage'])
2322 2321
2323 2322 treebystage = {}
2324 2323 printedtree = None
2325 2324 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2326 2325 for n, f in stages:
2327 2326 treebystage[n] = tree = f(tree)
2328 2327 if n in showalways or (n in showchanged and tree != printedtree):
2329 2328 if opts['show_stage'] or n != 'parsed':
2330 2329 ui.write(("* %s:\n") % n)
2331 2330 ui.write(revsetlang.prettyformat(tree), "\n")
2332 2331 printedtree = tree
2333 2332
2334 2333 if opts['verify_optimized']:
2335 2334 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2336 2335 brevs = revset.makematcher(treebystage['optimized'])(repo)
2337 2336 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2338 2337 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2339 2338 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2340 2339 arevs = list(arevs)
2341 2340 brevs = list(brevs)
2342 2341 if arevs == brevs:
2343 2342 return 0
2344 2343 ui.write(('--- analyzed\n'), label='diff.file_a')
2345 2344 ui.write(('+++ optimized\n'), label='diff.file_b')
2346 2345 sm = difflib.SequenceMatcher(None, arevs, brevs)
2347 2346 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2348 2347 if tag in ('delete', 'replace'):
2349 2348 for c in arevs[alo:ahi]:
2350 2349 ui.write('-%s\n' % c, label='diff.deleted')
2351 2350 if tag in ('insert', 'replace'):
2352 2351 for c in brevs[blo:bhi]:
2353 2352 ui.write('+%s\n' % c, label='diff.inserted')
2354 2353 if tag == 'equal':
2355 2354 for c in arevs[alo:ahi]:
2356 2355 ui.write(' %s\n' % c)
2357 2356 return 1
2358 2357
2359 2358 func = revset.makematcher(tree)
2360 2359 revs = func(repo)
2361 2360 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2362 2361 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2363 2362 if not opts['show_revs']:
2364 2363 return
2365 2364 for c in revs:
2366 2365 ui.write("%d\n" % c)
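# Usage sketch (the revset is only an example): -p all prints each stage
# listed in ``stages`` above before the matching revisions.
#   $ hg debugrevspec -p all '::. and not merge()'
# prints a "* parsed:", "* expanded:", "* concatenated:", "* analyzed:" and
# "* optimized:" tree in turn, then one revision number per line.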
2367 2366
2368 2367 @command('debugserve', [
2369 2368 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2370 2369 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2371 2370 ('', 'logiofile', '', _('file to log server I/O to')),
2372 2371 ], '')
2373 2372 def debugserve(ui, repo, **opts):
2374 2373 """run a server with advanced settings
2375 2374
2376 2375 This command is similar to :hg:`serve`. It exists partially as a
2377 2376 workaround for the fact that ``hg serve --stdio`` must have specific
2378 2377 arguments for security reasons.
2379 2378 """
2380 2379 opts = pycompat.byteskwargs(opts)
2381 2380
2382 2381 if not opts['sshstdio']:
2383 2382 raise error.Abort(_('only --sshstdio is currently supported'))
2384 2383
2385 2384 logfh = None
2386 2385
2387 2386 if opts['logiofd'] and opts['logiofile']:
2388 2387 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2389 2388
2390 2389 if opts['logiofd']:
2391 2390 # Line buffered because output is line based.
2392 2391 try:
2393 2392 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2394 2393 except OSError as e:
2395 2394 if e.errno != errno.ESPIPE:
2396 2395 raise
2397 2396 # can't seek a pipe, so `ab` mode fails on py3
2398 2397 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2399 2398 elif opts['logiofile']:
2400 2399 logfh = open(opts['logiofile'], 'ab', 1)
2401 2400
2402 2401 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2403 2402 s.serve_forever()
2404 2403
2405 2404 @command('debugsetparents', [], _('REV1 [REV2]'))
2406 2405 def debugsetparents(ui, repo, rev1, rev2=None):
2407 2406 """manually set the parents of the current working directory
2408 2407
2409 2408 This is useful for writing repository conversion tools, but should
2410 2409 be used with care. For example, neither the working directory nor the
2411 2410 dirstate is updated, so file status may be incorrect after running this
2412 2411 command.
2413 2412
2414 2413 Returns 0 on success.
2415 2414 """
2416 2415
2417 2416 node1 = scmutil.revsingle(repo, rev1).node()
2418 2417 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2419 2418
2420 2419 with repo.wlock():
2421 2420 repo.setparents(node1, node2)
2422 2421
2423 2422 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2424 2423 def debugssl(ui, repo, source=None, **opts):
2425 2424 '''test a secure connection to a server
2426 2425
2427 2426 This builds the certificate chain for the server on Windows, installing the
2428 2427 missing intermediates and trusted root via Windows Update if necessary. It
2429 2428 does nothing on other platforms.
2430 2429
2431 2430 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2432 2431 that server is used. See :hg:`help urls` for more information.
2433 2432
2434 2433 If the update succeeds, retry the original operation. Otherwise, the cause
2435 2434 of the SSL error is likely another issue.
2436 2435 '''
2437 2436 if not pycompat.iswindows:
2438 2437 raise error.Abort(_('certificate chain building is only possible on '
2439 2438 'Windows'))
2440 2439
2441 2440 if not source:
2442 2441 if not repo:
2443 2442 raise error.Abort(_("there is no Mercurial repository here, and no "
2444 2443 "server specified"))
2445 2444 source = "default"
2446 2445
2447 2446 source, branches = hg.parseurl(ui.expandpath(source))
2448 2447 url = util.url(source)
2449 2448 addr = None
2450 2449
2451 2450 defaultport = {'https': 443, 'ssh': 22}
2452 2451 if url.scheme in defaultport:
2453 2452 try:
2454 2453 addr = (url.host, int(url.port or defaultport[url.scheme]))
2455 2454 except ValueError:
2456 2455 raise error.Abort(_("malformed port number in URL"))
2457 2456 else:
2458 2457 raise error.Abort(_("only https and ssh connections are supported"))
2459 2458
2460 2459 from . import win32
2461 2460
2462 2461 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2463 2462 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2464 2463
2465 2464 try:
2466 2465 s.connect(addr)
2467 2466 cert = s.getpeercert(True)
2468 2467
2469 2468 ui.status(_('checking the certificate chain for %s\n') % url.host)
2470 2469
2471 2470 complete = win32.checkcertificatechain(cert, build=False)
2472 2471
2473 2472 if not complete:
2474 2473 ui.status(_('certificate chain is incomplete, updating... '))
2475 2474
2476 2475 if not win32.checkcertificatechain(cert):
2477 2476 ui.status(_('failed.\n'))
2478 2477 else:
2479 2478 ui.status(_('done.\n'))
2480 2479 else:
2481 2480 ui.status(_('full certificate chain is available\n'))
2482 2481 finally:
2483 2482 s.close()
2484 2483
2485 2484 @command('debugsub',
2486 2485 [('r', 'rev', '',
2487 2486 _('revision to check'), _('REV'))],
2488 2487 _('[-r REV] [REV]'))
2489 2488 def debugsub(ui, repo, rev=None):
2490 2489 ctx = scmutil.revsingle(repo, rev, None)
2491 2490 for k, v in sorted(ctx.substate.items()):
2492 2491 ui.write(('path %s\n') % k)
2493 2492 ui.write((' source %s\n') % v[0])
2494 2493 ui.write((' revision %s\n') % v[1])
2495 2494
2496 2495 @command('debugsuccessorssets',
2497 2496 [('', 'closest', False, _('return closest successors sets only'))],
2498 2497 _('[REV]'))
2499 2498 def debugsuccessorssets(ui, repo, *revs, **opts):
2500 2499 """show set of successors for revision
2501 2500
2502 2501 A successors set of changeset A is a consistent group of revisions that
2503 2502 succeed A. It contains non-obsolete changesets only unless the
2504 2503 closest successors set is requested (see --closest).
2505 2504
2506 2505 In most cases a changeset A has a single successors set containing a single
2507 2506 successor (changeset A replaced by A').
2508 2507
2509 2508 A changeset that is made obsolete with no successors is called "pruned".
2510 2509 Such changesets have no successors sets at all.
2511 2510
2512 2511 A changeset that has been "split" will have a successors set containing
2513 2512 more than one successor.
2514 2513
2515 2514 A changeset that has been rewritten in multiple different ways is called
2516 2515 "divergent". Such changesets have multiple successor sets (each of which
2517 2516 may also be split, i.e. have multiple successors).
2518 2517
2519 2518 Results are displayed as follows::
2520 2519
2521 2520 <rev1>
2522 2521 <successors-1A>
2523 2522 <rev2>
2524 2523 <successors-2A>
2525 2524 <successors-2B1> <successors-2B2> <successors-2B3>
2526 2525
2527 2526 Here rev2 has two possible (i.e. divergent) successors sets. The first
2528 2527 holds one element, whereas the second holds three (i.e. the changeset has
2529 2528 been split).
2530 2529 """
2531 2530 # passed to successorssets caching computation from one call to another
2532 2531 cache = {}
2533 2532 ctx2str = bytes
2534 2533 node2str = short
2535 2534 for rev in scmutil.revrange(repo, revs):
2536 2535 ctx = repo[rev]
2537 2536 ui.write('%s\n' % ctx2str(ctx))
2538 2537 for succsset in obsutil.successorssets(repo, ctx.node(),
2539 2538 closest=opts[r'closest'],
2540 2539 cache=cache):
2541 2540 if succsset:
2542 2541 ui.write(' ')
2543 2542 ui.write(node2str(succsset[0]))
2544 2543 for node in succsset[1:]:
2545 2544 ui.write(' ')
2546 2545 ui.write(node2str(node))
2547 2546 ui.write('\n')
2548 2547
2549 2548 @command('debugtemplate',
2550 2549 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2551 2550 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2552 2551 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2553 2552 optionalrepo=True)
2554 2553 def debugtemplate(ui, repo, tmpl, **opts):
2555 2554 """parse and apply a template
2556 2555
2557 2556 If -r/--rev is given, the template is processed as a log template and
2558 2557 applied to the given changesets. Otherwise, it is processed as a generic
2559 2558 template.
2560 2559
2561 2560 Use --verbose to print the parsed tree.
2562 2561 """
2563 2562 revs = None
2564 2563 if opts[r'rev']:
2565 2564 if repo is None:
2566 2565 raise error.RepoError(_('there is no Mercurial repository here '
2567 2566 '(.hg not found)'))
2568 2567 revs = scmutil.revrange(repo, opts[r'rev'])
2569 2568
2570 2569 props = {}
2571 2570 for d in opts[r'define']:
2572 2571 try:
2573 2572 k, v = (e.strip() for e in d.split('=', 1))
2574 2573 if not k or k == 'ui':
2575 2574 raise ValueError
2576 2575 props[k] = v
2577 2576 except ValueError:
2578 2577 raise error.Abort(_('malformed keyword definition: %s') % d)
2579 2578
2580 2579 if ui.verbose:
2581 2580 aliases = ui.configitems('templatealias')
2582 2581 tree = templater.parse(tmpl)
2583 2582 ui.note(templater.prettyformat(tree), '\n')
2584 2583 newtree = templater.expandaliases(tree, aliases)
2585 2584 if newtree != tree:
2586 2585 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2587 2586
2588 2587 if revs is None:
2589 2588 tres = formatter.templateresources(ui, repo)
2590 2589 t = formatter.maketemplater(ui, tmpl, resources=tres)
2591 2590 if ui.verbose:
2592 2591 kwds, funcs = t.symbolsuseddefault()
2593 2592 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2594 2593 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2595 2594 ui.write(t.renderdefault(props))
2596 2595 else:
2597 2596 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2598 2597 if ui.verbose:
2599 2598 kwds, funcs = displayer.t.symbolsuseddefault()
2600 2599 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2601 2600 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2602 2601 for r in revs:
2603 2602 displayer.show(repo[r], **pycompat.strkwargs(props))
2604 2603 displayer.close()
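# Usage sketch (keyword and output are hypothetical): -D defines an extra
# template keyword and -r renders the template as a log template.
#   $ hg debugtemplate -r . -D greeting=hello '{greeting} {rev}:{node|short}\n'
#   hello 42:1234567890ab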
2605 2604
2606 2605 @command('debuguigetpass', [
2607 2606 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2608 2607 ], _('[-p TEXT]'), norepo=True)
2609 2608 def debuguigetpass(ui, prompt=''):
2610 2609 """show prompt to type password"""
2611 2610 r = ui.getpass(prompt)
2612 2611 ui.write(('response: %s\n') % r)
2613 2612
2614 2613 @command('debuguiprompt', [
2615 2614 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2616 2615 ], _('[-p TEXT]'), norepo=True)
2617 2616 def debuguiprompt(ui, prompt=''):
2618 2617 """show plain prompt"""
2619 2618 r = ui.prompt(prompt)
2620 2619 ui.write(('response: %s\n') % r)
2621 2620
2622 2621 @command('debugupdatecaches', [])
2623 2622 def debugupdatecaches(ui, repo, *pats, **opts):
2624 2623 """warm all known caches in the repository"""
2625 2624 with repo.wlock(), repo.lock():
2626 2625 repo.updatecaches(full=True)
2627 2626
2628 2627 @command('debugupgraderepo', [
2629 2628 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2630 2629 ('', 'run', False, _('performs an upgrade')),
2631 2630 ])
2632 2631 def debugupgraderepo(ui, repo, run=False, optimize=None):
2633 2632 """upgrade a repository to use different features
2634 2633
2635 2634 If no arguments are specified, the repository is evaluated for upgrade
2636 2635 and a list of problems and potential optimizations is printed.
2637 2636
2638 2637 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2639 2638 can be influenced via additional arguments. More details will be provided
2640 2639 by the command output when run without ``--run``.
2641 2640
2642 2641 During the upgrade, the repository will be locked and no writes will be
2643 2642 allowed.
2644 2643
2645 2644 At the end of the upgrade, the repository may not be readable while new
2646 2645 repository data is swapped in. This window will be as long as it takes to
2647 2646 rename some directories inside the ``.hg`` directory. On most machines, this
2648 2647 should complete almost instantaneously and the chances of a consumer being
2649 2648 unable to access the repository should be low.
2650 2649 """
2651 2650 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2652 2651
2653 2652 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2654 2653 inferrepo=True)
2655 2654 def debugwalk(ui, repo, *pats, **opts):
2656 2655 """show how files match on given patterns"""
2657 2656 opts = pycompat.byteskwargs(opts)
2658 2657 m = scmutil.match(repo[None], pats, opts)
2659 2658 if ui.verbose:
2660 2659 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2661 2660 items = list(repo[None].walk(m))
2662 2661 if not items:
2663 2662 return
2664 2663 f = lambda fn: fn
2665 2664 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2666 2665 f = lambda fn: util.normpath(fn)
2667 2666 fmt = 'f %%-%ds %%-%ds %%s' % (
2668 2667 max([len(abs) for abs in items]),
2669 2668 max([len(m.rel(abs)) for abs in items]))
2670 2669 for abs in items:
2671 2670 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2672 2671 ui.write("%s\n" % line.rstrip())
2673 2672
2674 2673 @command('debugwhyunstable', [], _('REV'))
2675 2674 def debugwhyunstable(ui, repo, rev):
2676 2675 """explain instabilities of a changeset"""
2677 2676 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2678 2677 dnodes = ''
2679 2678 if entry.get('divergentnodes'):
2680 2679 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2681 2680 for ctx in entry['divergentnodes']) + ' '
2682 2681 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2683 2682 entry['reason'], entry['node']))
2684 2683
2685 2684 @command('debugwireargs',
2686 2685 [('', 'three', '', 'three'),
2687 2686 ('', 'four', '', 'four'),
2688 2687 ('', 'five', '', 'five'),
2689 2688 ] + cmdutil.remoteopts,
2690 2689 _('REPO [OPTIONS]... [ONE [TWO]]'),
2691 2690 norepo=True)
2692 2691 def debugwireargs(ui, repopath, *vals, **opts):
2693 2692 opts = pycompat.byteskwargs(opts)
2694 2693 repo = hg.peer(ui, opts, repopath)
2695 2694 for opt in cmdutil.remoteopts:
2696 2695 del opts[opt[1]]
2697 2696 args = {}
2698 2697 for k, v in opts.iteritems():
2699 2698 if v:
2700 2699 args[k] = v
2701 2700 args = pycompat.strkwargs(args)
2702 2701 # run twice to check that we don't mess up the stream for the next command
2703 2702 res1 = repo.debugwireargs(*vals, **args)
2704 2703 res2 = repo.debugwireargs(*vals, **args)
2705 2704 ui.write("%s\n" % res1)
2706 2705 if res1 != res2:
2707 2706 ui.warn("%s\n" % res2)
2708 2707
2709 2708 def _parsewirelangblocks(fh):
2710 2709 activeaction = None
2711 2710 blocklines = []
2712 2711
2713 2712 for line in fh:
2714 2713 line = line.rstrip()
2715 2714 if not line:
2716 2715 continue
2717 2716
2718 2717 if line.startswith(b'#'):
2719 2718 continue
2720 2719
2721 2720 if not line.startswith(' '):
2722 2721 # New block. Flush previous one.
2723 2722 if activeaction:
2724 2723 yield activeaction, blocklines
2725 2724
2726 2725 activeaction = line
2727 2726 blocklines = []
2728 2727 continue
2729 2728
2730 2729 # Else we start with an indent.
2731 2730
2732 2731 if not activeaction:
2733 2732 raise error.Abort(_('indented line outside of block'))
2734 2733
2735 2734 blocklines.append(line)
2736 2735
2737 2736 # Flush last block.
2738 2737 if activeaction:
2739 2738 yield activeaction, blocklines
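# Parsing sketch (hypothetical input): an unindented line starts a block and
# indented lines become its payload, so feeding the two lines
#   command listkeys
#       namespace bookmarks
# through _parsewirelangblocks() yields ('command listkeys',
# ['    namespace bookmarks']) -- indentation is preserved, trailing
# whitespace is stripped.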
2740 2739
2741 2740 @command('debugwireproto',
2742 2741 [
2743 2742 ('', 'localssh', False, _('start an SSH server for this repo')),
2744 2743 ('', 'peer', '', _('construct a specific version of the peer')),
2745 2744 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2746 2745 ('', 'nologhandshake', False,
2747 2746 _('do not log I/O related to the peer handshake')),
2748 2747 ] + cmdutil.remoteopts,
2749 2748 _('[PATH]'),
2750 2749 optionalrepo=True)
2751 2750 def debugwireproto(ui, repo, path=None, **opts):
2752 2751 """send wire protocol commands to a server
2753 2752
2754 2753 This command can be used to issue wire protocol commands to remote
2755 2754 peers and to debug the raw data being exchanged.
2756 2755
2757 2756 ``--localssh`` will start an SSH server against the current repository
2758 2757 and connect to that. By default, the connection will perform a handshake
2759 2758 and establish an appropriate peer instance.
2760 2759
2761 2760 ``--peer`` can be used to bypass the handshake protocol and construct a
2762 2761 peer instance using the specified class type. Valid values are ``raw``,
2763 2762 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2764 2763 raw data payloads and don't support higher-level command actions.
2765 2764
2766 2765 ``--noreadstderr`` can be used to disable automatic reading from stderr
2767 2766 of the peer (for SSH connections only). Disabling automatic reading of
2768 2767 stderr is useful for making output more deterministic.
2769 2768
2770 2769 Commands are issued via a mini language which is specified via stdin.
2771 2770 The language consists of individual actions to perform. An action is
2772 2771 defined by a block. A block is defined as a line with no leading
2773 2772 space followed by 0 or more lines with leading space. Blocks are
2774 2773 effectively a high-level command with additional metadata.
2775 2774
2776 2775 Lines beginning with ``#`` are ignored.
2777 2776
2778 2777 The following sections denote available actions.
2779 2778
2780 2779 raw
2781 2780 ---
2782 2781
2783 2782 Send raw data to the server.
2784 2783
2785 2784 The block payload contains the raw data to send as one atomic send
2786 2785 operation. The data may not actually be delivered in a single system
2787 2786 call: it depends on the abilities of the transport being used.
2788 2787
2789 2788 Each line in the block is de-indented and concatenated. Then, that
2790 2789 value is evaluated as a Python b'' literal. This allows the use of
2791 2790 backslash escaping, etc.
2792 2791
2793 2792 raw+
2794 2793 ----
2795 2794
2796 2795 Behaves like ``raw`` except flushes output afterwards.
2797 2796
2798 2797 command <X>
2799 2798 -----------
2800 2799
2801 2800 Send a request to run a named command, whose name follows the ``command``
2802 2801 string.
2803 2802
2804 2803 Arguments to the command are defined as lines in this block. The format of
2805 2804 each line is ``<key> <value>``. e.g.::
2806 2805
2807 2806 command listkeys
2808 2807 namespace bookmarks
2809 2808
2810 2809 If the value begins with ``eval:``, it will be interpreted as a Python
2811 2810 literal expression. Otherwise values are interpreted as Python b'' literals.
2812 2811 This allows sending complex types and encoding special byte sequences via
2813 2812 backslash escaping.
2814 2813
2815 2814 The following arguments have special meaning:
2816 2815
2817 2816 ``PUSHFILE``
2818 2817 When defined, the *push* mechanism of the peer will be used instead
2819 2818 of the static request-response mechanism and the content of the
2820 2819 file specified in the value of this argument will be sent as the
2821 2820 command payload.
2822 2821
2823 2822 This can be used to submit a local bundle file to the remote.
2824 2823
2825 2824 batchbegin
2826 2825 ----------
2827 2826
2828 2827 Instruct the peer to begin a batched send.
2829 2828
2830 2829 All ``command`` blocks are queued for execution until the next
2831 2830 ``batchsubmit`` block.
2832 2831
2833 2832 batchsubmit
2834 2833 -----------
2835 2834
2836 2835 Submit previously queued ``command`` blocks as a batch request.
2837 2836
2838 2837 This action MUST be paired with a ``batchbegin`` action.
2839 2838
2840 2839 httprequest <method> <path>
2841 2840 ---------------------------
2842 2841
2843 2842 (HTTP peer only)
2844 2843
2845 2844 Send an HTTP request to the peer.
2846 2845
2847 2846 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2848 2847
2849 2848 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2850 2849 headers to add to the request. e.g. ``Accept: foo``.
2851 2850
2852 2851 The following arguments are special:
2853 2852
2854 2853 ``BODYFILE``
2855 2854 The content of the file defined as the value to this argument will be
2856 2855 transferred verbatim as the HTTP request body.
2857 2856
2858 2857 ``frame <type> <flags> <payload>``
2859 2858 Send a unified protocol frame as part of the request body.
2860 2859
2861 2860 All frames will be collected and sent as the body to the HTTP
2862 2861 request.
2863 2862
2864 2863 close
2865 2864 -----
2866 2865
2867 2866 Close the connection to the server.
2868 2867
2869 2868 flush
2870 2869 -----
2871 2870
2872 2871 Flush data written to the server.
2873 2872
2874 2873 readavailable
2875 2874 -------------
2876 2875
2877 2876 Close the write end of the connection and read all available data from
2878 2877 the server.
2879 2878
2880 2879 If the connection to the server encompasses multiple pipes, we poll both
2881 2880 pipes and read available data.
2882 2881
2883 2882 readline
2884 2883 --------
2885 2884
2886 2885 Read a line of output from the server. If there are multiple output
2887 2886 pipes, reads only the main pipe.
2888 2887
2889 2888 ereadline
2890 2889 ---------
2891 2890
2892 2891 Like ``readline``, but read from the stderr pipe, if available.
2893 2892
2894 2893 read <X>
2895 2894 --------
2896 2895
2897 2896 ``read()`` N bytes from the server's main output pipe.
2898 2897
2899 2898 eread <X>
2900 2899 ---------
2901 2900
2902 2901 ``read()`` N bytes from the server's stderr pipe, if available.
2903 2902
2904 2903 Specifying Unified Frame-Based Protocol Frames
2905 2904 ----------------------------------------------
2906 2905
2907 2906 It is possible to emit a *Unified Frame-Based Protocol* by using special
2908 2907 syntax.
2909 2908
2910 2909 A frame is composed of a type, flags, and a payload. These can be parsed
2911 2910 from a string of the form:
2912 2911
2913 2912 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2914 2913
2915 2914 ``request-id`` and ``stream-id`` are integers defining the request and
2916 2915 stream identifiers.
2917 2916
2918 2917 ``type`` can be an integer value for the frame type or the string name
2919 2918 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2920 2919 ``command-name``.
2921 2920
2922 2921 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2923 2922 components. Each component (and there can be just one) can be an integer
2924 2923 or a flag name for stream flags or frame flags, respectively. Values are
2925 2924 resolved to integers and then bitwise OR'd together.
2926 2925
2927 2926 ``payload`` represents the raw frame payload. If it begins with
2928 2927 ``cbor:``, the following string is evaluated as Python code and the
2929 2928 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2930 2929 as a Python byte string literal.
2931 2930 """
2932 2931 opts = pycompat.byteskwargs(opts)
2933 2932
2934 2933 if opts['localssh'] and not repo:
2935 2934 raise error.Abort(_('--localssh requires a repository'))
2936 2935
2937 2936 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2938 2937 raise error.Abort(_('invalid value for --peer'),
2939 2938 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2940 2939
2941 2940 if path and opts['localssh']:
2942 2941 raise error.Abort(_('cannot specify --localssh with an explicit '
2943 2942 'path'))
2944 2943
2945 2944 if ui.interactive():
2946 2945 ui.write(_('(waiting for commands on stdin)\n'))
2947 2946
2948 2947 blocks = list(_parsewirelangblocks(ui.fin))
2949 2948
2950 2949 proc = None
2951 2950 stdin = None
2952 2951 stdout = None
2953 2952 stderr = None
2954 2953 opener = None
2955 2954
2956 2955 if opts['localssh']:
2957 2956 # We start the SSH server in its own process so there is process
2958 2957 # separation. This prevents a whole class of potential bugs around
2959 2958 # shared state from interfering with server operation.
2960 2959 args = procutil.hgcmd() + [
2961 2960 '-R', repo.root,
2962 2961 'debugserve', '--sshstdio',
2963 2962 ]
2964 2963 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2965 2964 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2966 2965 bufsize=0)
2967 2966
2968 2967 stdin = proc.stdin
2969 2968 stdout = proc.stdout
2970 2969 stderr = proc.stderr
2971 2970
2972 2971 # We turn the pipes into observers so we can log I/O.
2973 2972 if ui.verbose or opts['peer'] == 'raw':
2974 2973 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2975 2974 logdata=True)
2976 2975 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2977 2976 logdata=True)
2978 2977 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2979 2978 logdata=True)
2980 2979
2981 2980 # --localssh also implies the peer connection settings.
2982 2981
2983 2982 url = 'ssh://localserver'
2984 2983 autoreadstderr = not opts['noreadstderr']
2985 2984
2986 2985 if opts['peer'] == 'ssh1':
2987 2986 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2988 2987 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2989 2988 None, autoreadstderr=autoreadstderr)
2990 2989 elif opts['peer'] == 'ssh2':
2991 2990 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2992 2991 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2993 2992 None, autoreadstderr=autoreadstderr)
2994 2993 elif opts['peer'] == 'raw':
2995 2994 ui.write(_('using raw connection to peer\n'))
2996 2995 peer = None
2997 2996 else:
2998 2997 ui.write(_('creating ssh peer from handshake results\n'))
2999 2998 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3000 2999 autoreadstderr=autoreadstderr)
3001 3000
3002 3001 elif path:
3003 3002 # We bypass hg.peer() so we can proxy the sockets.
3004 3003 # TODO consider not doing this because we skip
3005 3004 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3006 3005 u = util.url(path)
3007 3006 if u.scheme != 'http':
3008 3007 raise error.Abort(_('only http:// paths are currently supported'))
3009 3008
3010 3009 url, authinfo = u.authinfo()
3011 3010 openerargs = {
3012 3011 r'useragent': b'Mercurial debugwireproto',
3013 3012 }
3014 3013
3015 3014 # Turn pipes/sockets into observers so we can log I/O.
3016 3015 if ui.verbose:
3017 3016 openerargs.update({
3018 3017 r'loggingfh': ui,
3019 3018 r'loggingname': b's',
3020 3019 r'loggingopts': {
3021 3020 r'logdata': True,
3022 3021 r'logdataapis': False,
3023 3022 },
3024 3023 })
3025 3024
3026 3025 if ui.debugflag:
3027 3026 openerargs[r'loggingopts'][r'logdataapis'] = True
3028 3027
3029 3028 # Don't send default headers when in raw mode. This allows us to
3030 3029 # bypass most of the behavior of our URL handling code so we can
3031 3030 # have near complete control over what's sent on the wire.
3032 3031 if opts['peer'] == 'raw':
3033 3032 openerargs[r'sendaccept'] = False
3034 3033
3035 3034 opener = urlmod.opener(ui, authinfo, **openerargs)
3036 3035
3037 3036 if opts['peer'] == 'http2':
3038 3037 ui.write(_('creating http peer for wire protocol version 2\n'))
3039 3038 # We go through makepeer() because we need an API descriptor for
3040 3039 # the peer instance to be useful.
3041 3040 with ui.configoverride({
3042 3041 ('experimental', 'httppeer.advertise-v2'): True}):
3043 3042 if opts['nologhandshake']:
3044 3043 ui.pushbuffer()
3045 3044
3046 3045 peer = httppeer.makepeer(ui, path, opener=opener)
3047 3046
3048 3047 if opts['nologhandshake']:
3049 3048 ui.popbuffer()
3050 3049
3051 3050 if not isinstance(peer, httppeer.httpv2peer):
3052 3051 raise error.Abort(_('could not instantiate HTTP peer for '
3053 3052 'wire protocol version 2'),
3054 3053 hint=_('the server may not have the feature '
3055 3054 'enabled or is not allowing this '
3056 3055 'client version'))
3057 3056
3058 3057 elif opts['peer'] == 'raw':
3059 3058 ui.write(_('using raw connection to peer\n'))
3060 3059 peer = None
3061 3060 elif opts['peer']:
3062 3061 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3063 3062 opts['peer'])
3064 3063 else:
3065 3064 peer = httppeer.makepeer(ui, path, opener=opener)
3066 3065
3067 3066 # We /could/ populate stdin/stdout with sock.makefile()...
3068 3067 else:
3069 3068 raise error.Abort(_('unsupported connection configuration'))
3070 3069
3071 3070 batchedcommands = None
3072 3071
3073 3072 # Now perform actions based on the parsed wire language instructions.
3074 3073 for action, lines in blocks:
3075 3074 if action in ('raw', 'raw+'):
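            # 'raw' writes the escaped payload to the peer's stdin;
            # 'raw+' additionally flushes the pipe afterwards.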
3076 3075 if not stdin:
3077 3076 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3078 3077
3079 3078 # Concatenate the data together.
3080 3079 data = ''.join(l.lstrip() for l in lines)
3081 3080 data = stringutil.unescapestr(data)
3082 3081 stdin.write(data)
3083 3082
3084 3083 if action == 'raw+':
3085 3084 stdin.flush()
3086 3085 elif action == 'flush':
3087 3086 if not stdin:
3088 3087 raise error.Abort(_('cannot call flush on this peer'))
3089 3088 stdin.flush()
3090 3089 elif action.startswith('command'):
3091 3090 if not peer:
3092 3091 raise error.Abort(_('cannot send commands unless peer instance '
3093 3092 'is available'))
3094 3093
3095 3094 command = action.split(' ', 1)[1]
3096 3095
3097 3096 args = {}
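            # Each argument line has the form "<key> <value>" (the value
            # may be empty). A value prefixed with "eval:" is evaluated as
            # a Python literal; otherwise escape sequences in it are
            # expanded.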
3098 3097 for line in lines:
3099 3098 # We need to allow empty values.
3100 3099 fields = line.lstrip().split(' ', 1)
3101 3100 if len(fields) == 1:
3102 3101 key = fields[0]
3103 3102 value = ''
3104 3103 else:
3105 3104 key, value = fields
3106 3105
3107 3106 if value.startswith('eval:'):
3108 3107 value = stringutil.evalpythonliteral(value[5:])
3109 3108 else:
3110 3109 value = stringutil.unescapestr(value)
3111 3110
3112 3111 args[key] = value
3113 3112
3114 3113 if batchedcommands is not None:
3115 3114 batchedcommands.append((command, args))
3116 3115 continue
3117 3116
3118 3117 ui.status(_('sending %s command\n') % command)
3119 3118
3120 3119 if 'PUSHFILE' in args:
3121 3120 with open(args['PUSHFILE'], r'rb') as fh:
3122 3121 del args['PUSHFILE']
3123 3122 res, output = peer._callpush(command, fh,
3124 3123 **pycompat.strkwargs(args))
3125 3124 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3126 3125 ui.status(_('remote output: %s\n') %
3127 3126 stringutil.escapestr(output))
3128 3127 else:
3129 3128 with peer.commandexecutor() as e:
3130 3129 res = e.callcommand(command, args).result()
3131 3130
3132 3131 if isinstance(res, wireprotov2peer.commandresponse):
3133 3132 val = list(res.cborobjects())
3134 3133 ui.status(_('response: %s\n') %
3135 3134 stringutil.pprint(val, bprefix=True))
3136 3135
3137 3136 else:
3138 3137 ui.status(_('response: %s\n') %
3139 3138 stringutil.pprint(res, bprefix=True))
3140 3139
3141 3140 elif action == 'batchbegin':
3142 3141 if batchedcommands is not None:
3143 3142 raise error.Abort(_('nested batchbegin not allowed'))
3144 3143
3145 3144 batchedcommands = []
3146 3145 elif action == 'batchsubmit':
3147 3146 # There is a batching API we could go through. But it would be
3148 3147 # difficult to normalize requests into function calls. It is easier
3149 3148 # to bypass this layer and normalize to commands + args.
3150 3149 ui.status(_('sending batch with %d sub-commands\n') %
3151 3150 len(batchedcommands))
3152 3151 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3153 3152 ui.status(_('response #%d: %s\n') %
3154 3153 (i, stringutil.escapestr(chunk)))
3155 3154
3156 3155 batchedcommands = None
3157 3156
3158 3157 elif action.startswith('httprequest '):
3159 3158 if not opener:
3160 3159 raise error.Abort(_('cannot use httprequest without an HTTP '
3161 3160 'peer'))
3162 3161
3163 3162 request = action.split(' ', 2)
3164 3163 if len(request) != 3:
3165 3164 raise error.Abort(_('invalid httprequest: expected format is '
3166 3165 '"httprequest <method> <path>'))
3167 3166
3168 3167 method, httppath = request[1:]
3169 3168 headers = {}
3170 3169 body = None
3171 3170 frames = []
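            # Remaining lines are "<header>: <value>" pairs, "BODYFILE
            # <path>" to read the request body from a file, or "frame
            # <spec>" to append a frame to the request body.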
3172 3171 for line in lines:
3173 3172 line = line.lstrip()
3174 3173 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3175 3174 if m:
3176 3175 headers[m.group(1)] = m.group(2)
3177 3176 continue
3178 3177
3179 3178 if line.startswith(b'BODYFILE '):
3180 3179 with open(line.split(b' ', 1)[1], 'rb') as fh:
3181 3180 body = fh.read()
3182 3181 elif line.startswith(b'frame '):
3183 3182 frame = wireprotoframing.makeframefromhumanstring(
3184 3183 line[len(b'frame '):])
3185 3184
3186 3185 frames.append(frame)
3187 3186 else:
3188 3187 raise error.Abort(_('unknown argument to httprequest: %s') %
3189 3188 line)
3190 3189
3191 3190 url = path + httppath
3192 3191
3193 3192 if frames:
3194 3193 body = b''.join(bytes(f) for f in frames)
3195 3194
3196 3195 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3197 3196
3198 3197 # urllib.Request insists on using has_data() as a proxy for
3199 3198 # determining the request method. Override that to use our
3200 3199 # explicitly requested method.
3201 3200 req.get_method = lambda: method
3202 3201
3203 3202 try:
3204 3203 res = opener.open(req)
3205 3204 body = res.read()
3206 3205 except util.urlerr.urlerror as e:
3207 3206 e.read()
3208 3207 continue
3209 3208
3210 3209 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3211 3210 ui.write(_('cbor> %s\n') %
3212 3211 stringutil.pprint(cbor.loads(body), bprefix=True))
3213 3212
3214 3213 elif action == 'close':
3215 3214 peer.close()
3216 3215 elif action == 'readavailable':
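            # Close the peer's stdin and drain stdout/stderr so any
            # buffered output is consumed (and logged when I/O observers
            # are active).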
3217 3216 if not stdout or not stderr:
3218 3217 raise error.Abort(_('readavailable not available on this peer'))
3219 3218
3220 3219 stdin.close()
3221 3220 stdout.read()
3222 3221 stderr.read()
3223 3222
3224 3223 elif action == 'readline':
3225 3224 if not stdout:
3226 3225 raise error.Abort(_('readline not available on this peer'))
3227 3226 stdout.readline()
3228 3227 elif action == 'ereadline':
3229 3228 if not stderr:
3230 3229 raise error.Abort(_('ereadline not available on this peer'))
3231 3230 stderr.readline()
3232 3231 elif action.startswith('read '):
3233 3232 count = int(action.split(' ', 1)[1])
3234 3233 if not stdout:
3235 3234 raise error.Abort(_('read not available on this peer'))
3236 3235 stdout.read(count)
3237 3236 elif action.startswith('eread '):
3238 3237 count = int(action.split(' ', 1)[1])
3239 3238 if not stderr:
3240 3239 raise error.Abort(_('eread not available on this peer'))
3241 3240 stderr.read(count)
3242 3241 else:
3243 3242 raise error.Abort(_('unknown action: %s') % action)
3244 3243
3245 3244 if batchedcommands is not None:
3246 3245 raise error.Abort(_('unclosed "batchbegin" request'))
3247 3246
3248 3247 if peer:
3249 3248 peer.close()
3250 3249
3251 3250 if proc:
3252 3251 proc.kill()