debugcommands: add support for extensions adding their own debug info...
Augie Fackler
r42875:0c0478b7 default draft
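This change teaches 'hg debuginstall' to look up a 'debuginstall' attribute on every loaded extension and to add the handler's return value to the overall problem count (see the lines added near the end of the debuginstall function below). A minimal sketch of a hypothetical extension using the hook; the extension name and the specific check it performs are illustrative and not part of this change:

    # myext.py - hypothetical extension exercising the new debuginstall hook
    from mercurial.utils import procutil

    def debuginstall(ui, fm):
        # called by 'hg debuginstall' with the active formatter; the returned
        # count is added to the overall number of problems
        problems = 0
        gpgbin = procutil.findexe('gpg')  # illustrative external-tool check
        fm.condwrite(gpgbin, 'gpgexe',
                     'checking gpg executable (%s)\n', gpgbin or '')
        fm.condwrite(not gpgbin, 'gpgnotfound',
                     ' gpg executable not found in PATH\n')
        if not gpgbin:
            problems += 1
        return problems

With such an extension enabled, 'hg debuginstall' folds the extra check and any problems it reports into its normal report.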
@@ -1,3484 +1,3489 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 compression,
86 86 dateutil,
87 87 procutil,
88 88 stringutil,
89 89 )
90 90
91 91 from .revlogutils import (
92 92 deltas as deltautil
93 93 )
94 94
95 95 release = lockmod.release
96 96
97 97 command = registrar.command()
98 98
99 99 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
100 100 def debugancestor(ui, repo, *args):
101 101 """find the ancestor revision of two revisions in a given index"""
102 102 if len(args) == 3:
103 103 index, rev1, rev2 = args
104 104 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
105 105 lookup = r.lookup
106 106 elif len(args) == 2:
107 107 if not repo:
108 108 raise error.Abort(_('there is no Mercurial repository here '
109 109 '(.hg not found)'))
110 110 rev1, rev2 = args
111 111 r = repo.changelog
112 112 lookup = repo.lookup
113 113 else:
114 114 raise error.Abort(_('either two or three arguments required'))
115 115 a = r.ancestor(lookup(rev1), lookup(rev2))
116 116 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
117 117
118 118 @command('debugapplystreamclonebundle', [], 'FILE')
119 119 def debugapplystreamclonebundle(ui, repo, fname):
120 120 """apply a stream clone bundle file"""
121 121 f = hg.openpath(ui, fname)
122 122 gen = exchange.readbundle(ui, f, fname)
123 123 gen.apply(repo)
124 124
125 125 @command('debugbuilddag',
126 126 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
127 127 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
128 128 ('n', 'new-file', None, _('add new file at each rev'))],
129 129 _('[OPTION]... [TEXT]'))
130 130 def debugbuilddag(ui, repo, text=None,
131 131 mergeable_file=False,
132 132 overwritten_file=False,
133 133 new_file=False):
134 134 """builds a repo with a given DAG from scratch in the current empty repo
135 135
136 136 The description of the DAG is read from stdin if not given on the
137 137 command line.
138 138
139 139 Elements:
140 140
141 141 - "+n" is a linear run of n nodes based on the current default parent
142 142 - "." is a single node based on the current default parent
143 143 - "$" resets the default parent to null (implied at the start);
144 144 otherwise the default parent is always the last node created
145 145 - "<p" sets the default parent to the backref p
146 146 - "*p" is a fork at parent p, which is a backref
147 147 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
148 148 - "/p2" is a merge of the preceding node and p2
149 149 - ":tag" defines a local tag for the preceding node
150 150 - "@branch" sets the named branch for subsequent nodes
151 151 - "#...\\n" is a comment up to the end of the line
152 152
153 153 Whitespace between the above elements is ignored.
154 154
155 155 A backref is either
156 156
157 157 - a number n, which references the node curr-n, where curr is the current
158 158 node, or
159 159 - the name of a local tag you placed earlier using ":tag", or
160 160 - empty to denote the default parent.
161 161
162 162 All string-valued elements are either strictly alphanumeric, or must
163 163 be enclosed in double quotes ("..."), with "\\" as escape character.
164 164 """
165 165
166 166 if text is None:
167 167 ui.status(_("reading DAG from stdin\n"))
168 168 text = ui.fin.read()
169 169
170 170 cl = repo.changelog
171 171 if len(cl) > 0:
172 172 raise error.Abort(_('repository is not empty'))
173 173
174 174 # determine number of revs in DAG
175 175 total = 0
176 176 for type, data in dagparser.parsedag(text):
177 177 if type == 'n':
178 178 total += 1
179 179
180 180 if mergeable_file:
181 181 linesperrev = 2
182 182 # make a file with k lines per rev
183 183 initialmergedlines = ['%d' % i
184 184 for i in pycompat.xrange(0, total * linesperrev)]
185 185 initialmergedlines.append("")
186 186
187 187 tags = []
188 188 progress = ui.makeprogress(_('building'), unit=_('revisions'),
189 189 total=total)
190 190 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
191 191 at = -1
192 192 atbranch = 'default'
193 193 nodeids = []
194 194 id = 0
195 195 progress.update(id)
196 196 for type, data in dagparser.parsedag(text):
197 197 if type == 'n':
198 198 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 199 id, ps = data
200 200
201 201 files = []
202 202 filecontent = {}
203 203
204 204 p2 = None
205 205 if mergeable_file:
206 206 fn = "mf"
207 207 p1 = repo[ps[0]]
208 208 if len(ps) > 1:
209 209 p2 = repo[ps[1]]
210 210 pa = p1.ancestor(p2)
211 211 base, local, other = [x[fn].data() for x in (pa, p1,
212 212 p2)]
213 213 m3 = simplemerge.Merge3Text(base, local, other)
214 214 ml = [l.strip() for l in m3.merge_lines()]
215 215 ml.append("")
216 216 elif at > 0:
217 217 ml = p1[fn].data().split("\n")
218 218 else:
219 219 ml = initialmergedlines
220 220 ml[id * linesperrev] += " r%i" % id
221 221 mergedtext = "\n".join(ml)
222 222 files.append(fn)
223 223 filecontent[fn] = mergedtext
224 224
225 225 if overwritten_file:
226 226 fn = "of"
227 227 files.append(fn)
228 228 filecontent[fn] = "r%i\n" % id
229 229
230 230 if new_file:
231 231 fn = "nf%i" % id
232 232 files.append(fn)
233 233 filecontent[fn] = "r%i\n" % id
234 234 if len(ps) > 1:
235 235 if not p2:
236 236 p2 = repo[ps[1]]
237 237 for fn in p2:
238 238 if fn.startswith("nf"):
239 239 files.append(fn)
240 240 filecontent[fn] = p2[fn].data()
241 241
242 242 def fctxfn(repo, cx, path):
243 243 if path in filecontent:
244 244 return context.memfilectx(repo, cx, path,
245 245 filecontent[path])
246 246 return None
247 247
248 248 if len(ps) == 0 or ps[0] < 0:
249 249 pars = [None, None]
250 250 elif len(ps) == 1:
251 251 pars = [nodeids[ps[0]], None]
252 252 else:
253 253 pars = [nodeids[p] for p in ps]
254 254 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 255 date=(id, 0),
256 256 user="debugbuilddag",
257 257 extra={'branch': atbranch})
258 258 nodeid = repo.commitctx(cx)
259 259 nodeids.append(nodeid)
260 260 at = id
261 261 elif type == 'l':
262 262 id, name = data
263 263 ui.note(('tag %s\n' % name))
264 264 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 265 elif type == 'a':
266 266 ui.note(('branch %s\n' % data))
267 267 atbranch = data
268 268 progress.update(id)
269 269
270 270 if tags:
271 271 repo.vfs.write("localtags", "".join(tags))
272 272
273 273 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
274 274 indent_string = ' ' * indent
275 275 if all:
276 276 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
277 277 % indent_string)
278 278
279 279 def showchunks(named):
280 280 ui.write("\n%s%s\n" % (indent_string, named))
281 281 for deltadata in gen.deltaiter():
282 282 node, p1, p2, cs, deltabase, delta, flags = deltadata
283 283 ui.write("%s%s %s %s %s %s %d\n" %
284 284 (indent_string, hex(node), hex(p1), hex(p2),
285 285 hex(cs), hex(deltabase), len(delta)))
286 286
287 287 chunkdata = gen.changelogheader()
288 288 showchunks("changelog")
289 289 chunkdata = gen.manifestheader()
290 290 showchunks("manifest")
291 291 for chunkdata in iter(gen.filelogheader, {}):
292 292 fname = chunkdata['filename']
293 293 showchunks(fname)
294 294 else:
295 295 if isinstance(gen, bundle2.unbundle20):
296 296 raise error.Abort(_('use debugbundle2 for this file'))
297 297 chunkdata = gen.changelogheader()
298 298 for deltadata in gen.deltaiter():
299 299 node, p1, p2, cs, deltabase, delta, flags = deltadata
300 300 ui.write("%s%s\n" % (indent_string, hex(node)))
301 301
302 302 def _debugobsmarkers(ui, part, indent=0, **opts):
303 303 """display version and markers contained in 'data'"""
304 304 opts = pycompat.byteskwargs(opts)
305 305 data = part.read()
306 306 indent_string = ' ' * indent
307 307 try:
308 308 version, markers = obsolete._readmarkers(data)
309 309 except error.UnknownVersion as exc:
310 310 msg = "%sunsupported version: %s (%d bytes)\n"
311 311 msg %= indent_string, exc.version, len(data)
312 312 ui.write(msg)
313 313 else:
314 314 msg = "%sversion: %d (%d bytes)\n"
315 315 msg %= indent_string, version, len(data)
316 316 ui.write(msg)
317 317 fm = ui.formatter('debugobsolete', opts)
318 318 for rawmarker in sorted(markers):
319 319 m = obsutil.marker(None, rawmarker)
320 320 fm.startitem()
321 321 fm.plain(indent_string)
322 322 cmdutil.showmarker(fm, m)
323 323 fm.end()
324 324
325 325 def _debugphaseheads(ui, data, indent=0):
326 326 """display version and markers contained in 'data'"""
327 327 indent_string = ' ' * indent
328 328 headsbyphase = phases.binarydecode(data)
329 329 for phase in phases.allphases:
330 330 for head in headsbyphase[phase]:
331 331 ui.write(indent_string)
332 332 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
333 333
334 334 def _quasirepr(thing):
335 335 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
336 336 return '{%s}' % (
337 337 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
338 338 return pycompat.bytestr(repr(thing))
339 339
340 340 def _debugbundle2(ui, gen, all=None, **opts):
341 341 """lists the contents of a bundle2"""
342 342 if not isinstance(gen, bundle2.unbundle20):
343 343 raise error.Abort(_('not a bundle2 file'))
344 344 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
345 345 parttypes = opts.get(r'part_type', [])
346 346 for part in gen.iterparts():
347 347 if parttypes and part.type not in parttypes:
348 348 continue
349 349 msg = '%s -- %s (mandatory: %r)\n'
350 350 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
351 351 if part.type == 'changegroup':
352 352 version = part.params.get('version', '01')
353 353 cg = changegroup.getunbundler(version, part, 'UN')
354 354 if not ui.quiet:
355 355 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
356 356 if part.type == 'obsmarkers':
357 357 if not ui.quiet:
358 358 _debugobsmarkers(ui, part, indent=4, **opts)
359 359 if part.type == 'phase-heads':
360 360 if not ui.quiet:
361 361 _debugphaseheads(ui, part, indent=4)
362 362
363 363 @command('debugbundle',
364 364 [('a', 'all', None, _('show all details')),
365 365 ('', 'part-type', [], _('show only the named part type')),
366 366 ('', 'spec', None, _('print the bundlespec of the bundle'))],
367 367 _('FILE'),
368 368 norepo=True)
369 369 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
370 370 """lists the contents of a bundle"""
371 371 with hg.openpath(ui, bundlepath) as f:
372 372 if spec:
373 373 spec = exchange.getbundlespec(ui, f)
374 374 ui.write('%s\n' % spec)
375 375 return
376 376
377 377 gen = exchange.readbundle(ui, f, bundlepath)
378 378 if isinstance(gen, bundle2.unbundle20):
379 379 return _debugbundle2(ui, gen, all=all, **opts)
380 380 _debugchangegroup(ui, gen, all=all, **opts)
381 381
382 382 @command('debugcapabilities',
383 383 [], _('PATH'),
384 384 norepo=True)
385 385 def debugcapabilities(ui, path, **opts):
386 386 """lists the capabilities of a remote peer"""
387 387 opts = pycompat.byteskwargs(opts)
388 388 peer = hg.peer(ui, opts, path)
389 389 caps = peer.capabilities()
390 390 ui.write(('Main capabilities:\n'))
391 391 for c in sorted(caps):
392 392 ui.write((' %s\n') % c)
393 393 b2caps = bundle2.bundle2caps(peer)
394 394 if b2caps:
395 395 ui.write(('Bundle2 capabilities:\n'))
396 396 for key, values in sorted(b2caps.iteritems()):
397 397 ui.write((' %s\n') % key)
398 398 for v in values:
399 399 ui.write((' %s\n') % v)
400 400
401 401 @command('debugcheckstate', [], '')
402 402 def debugcheckstate(ui, repo):
403 403 """validate the correctness of the current dirstate"""
404 404 parent1, parent2 = repo.dirstate.parents()
405 405 m1 = repo[parent1].manifest()
406 406 m2 = repo[parent2].manifest()
407 407 errors = 0
408 408 for f in repo.dirstate:
409 409 state = repo.dirstate[f]
410 410 if state in "nr" and f not in m1:
411 411 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
412 412 errors += 1
413 413 if state in "a" and f in m1:
414 414 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
415 415 errors += 1
416 416 if state in "m" and f not in m1 and f not in m2:
417 417 ui.warn(_("%s in state %s, but not in either manifest\n") %
418 418 (f, state))
419 419 errors += 1
420 420 for f in m1:
421 421 state = repo.dirstate[f]
422 422 if state not in "nrm":
423 423 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
424 424 errors += 1
425 425 if errors:
426 426 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
427 427 raise error.Abort(errstr)
428 428
429 429 @command('debugcolor',
430 430 [('', 'style', None, _('show all configured styles'))],
431 431 'hg debugcolor')
432 432 def debugcolor(ui, repo, **opts):
433 433 """show available color, effects or style"""
434 434 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
435 435 if opts.get(r'style'):
436 436 return _debugdisplaystyle(ui)
437 437 else:
438 438 return _debugdisplaycolor(ui)
439 439
440 440 def _debugdisplaycolor(ui):
441 441 ui = ui.copy()
442 442 ui._styles.clear()
443 443 for effect in color._activeeffects(ui).keys():
444 444 ui._styles[effect] = effect
445 445 if ui._terminfoparams:
446 446 for k, v in ui.configitems('color'):
447 447 if k.startswith('color.'):
448 448 ui._styles[k] = k[6:]
449 449 elif k.startswith('terminfo.'):
450 450 ui._styles[k] = k[9:]
451 451 ui.write(_('available colors:\n'))
452 452 # sort labels containing '_' after the others to group '_background' entries.
453 453 items = sorted(ui._styles.items(),
454 454 key=lambda i: ('_' in i[0], i[0], i[1]))
455 455 for colorname, label in items:
456 456 ui.write(('%s\n') % colorname, label=label)
457 457
458 458 def _debugdisplaystyle(ui):
459 459 ui.write(_('available style:\n'))
460 460 if not ui._styles:
461 461 return
462 462 width = max(len(s) for s in ui._styles)
463 463 for label, effects in sorted(ui._styles.items()):
464 464 ui.write('%s' % label, label=label)
465 465 if effects:
466 466 # 50
467 467 ui.write(': ')
468 468 ui.write(' ' * (max(0, width - len(label))))
469 469 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
470 470 ui.write('\n')
471 471
472 472 @command('debugcreatestreamclonebundle', [], 'FILE')
473 473 def debugcreatestreamclonebundle(ui, repo, fname):
474 474 """create a stream clone bundle file
475 475
476 476 Stream bundles are special bundles that are essentially archives of
477 477 revlog files. They are commonly used for cloning very quickly.
478 478 """
479 479 # TODO we may want to turn this into an abort when this functionality
480 480 # is moved into `hg bundle`.
481 481 if phases.hassecret(repo):
482 482 ui.warn(_('(warning: stream clone bundle will contain secret '
483 483 'revisions)\n'))
484 484
485 485 requirements, gen = streamclone.generatebundlev1(repo)
486 486 changegroup.writechunks(ui, gen, fname)
487 487
488 488 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
489 489
490 490 @command('debugdag',
491 491 [('t', 'tags', None, _('use tags as labels')),
492 492 ('b', 'branches', None, _('annotate with branch names')),
493 493 ('', 'dots', None, _('use dots for runs')),
494 494 ('s', 'spaces', None, _('separate elements by spaces'))],
495 495 _('[OPTION]... [FILE [REV]...]'),
496 496 optionalrepo=True)
497 497 def debugdag(ui, repo, file_=None, *revs, **opts):
498 498 """format the changelog or an index DAG as a concise textual description
499 499
500 500 If you pass a revlog index, the revlog's DAG is emitted. If you list
501 501 revision numbers, they get labeled in the output as rN.
502 502
503 503 Otherwise, the changelog DAG of the current repo is emitted.
504 504 """
505 505 spaces = opts.get(r'spaces')
506 506 dots = opts.get(r'dots')
507 507 if file_:
508 508 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
509 509 file_)
510 510 revs = set((int(r) for r in revs))
511 511 def events():
512 512 for r in rlog:
513 513 yield 'n', (r, list(p for p in rlog.parentrevs(r)
514 514 if p != -1))
515 515 if r in revs:
516 516 yield 'l', (r, "r%i" % r)
517 517 elif repo:
518 518 cl = repo.changelog
519 519 tags = opts.get(r'tags')
520 520 branches = opts.get(r'branches')
521 521 if tags:
522 522 labels = {}
523 523 for l, n in repo.tags().items():
524 524 labels.setdefault(cl.rev(n), []).append(l)
525 525 def events():
526 526 b = "default"
527 527 for r in cl:
528 528 if branches:
529 529 newb = cl.read(cl.node(r))[5]['branch']
530 530 if newb != b:
531 531 yield 'a', newb
532 532 b = newb
533 533 yield 'n', (r, list(p for p in cl.parentrevs(r)
534 534 if p != -1))
535 535 if tags:
536 536 ls = labels.get(r)
537 537 if ls:
538 538 for l in ls:
539 539 yield 'l', (r, l)
540 540 else:
541 541 raise error.Abort(_('need repo for changelog dag'))
542 542
543 543 for line in dagparser.dagtextlines(events(),
544 544 addspaces=spaces,
545 545 wraplabels=True,
546 546 wrapannotations=True,
547 547 wrapnonlinear=dots,
548 548 usedots=dots,
549 549 maxlinewidth=70):
550 550 ui.write(line)
551 551 ui.write("\n")
552 552
553 553 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
554 554 def debugdata(ui, repo, file_, rev=None, **opts):
555 555 """dump the contents of a data file revision"""
556 556 opts = pycompat.byteskwargs(opts)
557 557 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
558 558 if rev is not None:
559 559 raise error.CommandError('debugdata', _('invalid arguments'))
560 560 file_, rev = None, file_
561 561 elif rev is None:
562 562 raise error.CommandError('debugdata', _('invalid arguments'))
563 563 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
564 564 try:
565 565 ui.write(r.revision(r.lookup(rev), raw=True))
566 566 except KeyError:
567 567 raise error.Abort(_('invalid revision identifier %s') % rev)
568 568
569 569 @command('debugdate',
570 570 [('e', 'extended', None, _('try extended date formats'))],
571 571 _('[-e] DATE [RANGE]'),
572 572 norepo=True, optionalrepo=True)
573 573 def debugdate(ui, date, range=None, **opts):
574 574 """parse and display a date"""
575 575 if opts[r"extended"]:
576 576 d = dateutil.parsedate(date, util.extendeddateformats)
577 577 else:
578 578 d = dateutil.parsedate(date)
579 579 ui.write(("internal: %d %d\n") % d)
580 580 ui.write(("standard: %s\n") % dateutil.datestr(d))
581 581 if range:
582 582 m = dateutil.matchdate(range)
583 583 ui.write(("match: %s\n") % m(d[0]))
584 584
585 585 @command('debugdeltachain',
586 586 cmdutil.debugrevlogopts + cmdutil.formatteropts,
587 587 _('-c|-m|FILE'),
588 588 optionalrepo=True)
589 589 def debugdeltachain(ui, repo, file_=None, **opts):
590 590 """dump information about delta chains in a revlog
591 591
592 592 Output can be templatized. Available template keywords are:
593 593
594 594 :``rev``: revision number
595 595 :``chainid``: delta chain identifier (numbered by unique base)
596 596 :``chainlen``: delta chain length to this revision
597 597 :``prevrev``: previous revision in delta chain
598 598 :``deltatype``: role of delta / how it was computed
599 599 :``compsize``: compressed size of revision
600 600 :``uncompsize``: uncompressed size of revision
601 601 :``chainsize``: total size of compressed revisions in chain
602 602 :``chainratio``: total chain size divided by uncompressed revision size
603 603 (new delta chains typically start at ratio 2.00)
604 604 :``lindist``: linear distance from base revision in delta chain to end
605 605 of this revision
606 606 :``extradist``: total size of revisions not part of this delta chain from
607 607 base of delta chain to end of this revision; a measurement
608 608 of how much extra data we need to read/seek across to read
609 609 the delta chain for this revision
610 610 :``extraratio``: extradist divided by chainsize; another representation of
611 611 how much unrelated data is needed to load this delta chain
612 612
613 613 If the repository is configured to use sparse reads, additional keywords
614 614 are available:
615 615
616 616 :``readsize``: total size of data read from the disk for a revision
617 617 (sum of the sizes of all the blocks)
618 618 :``largestblock``: size of the largest block of data read from the disk
619 619 :``readdensity``: density of useful bytes in the data read from the disk
620 620 :``srchunks``: in how many data hunks the whole revision would be read
621 621
622 622 Sparse reads can be enabled with experimental.sparse-read = True
623 623 """
624 624 opts = pycompat.byteskwargs(opts)
625 625 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
626 626 index = r.index
627 627 start = r.start
628 628 length = r.length
629 629 generaldelta = r.version & revlog.FLAG_GENERALDELTA
630 630 withsparseread = getattr(r, '_withsparseread', False)
631 631
632 632 def revinfo(rev):
633 633 e = index[rev]
634 634 compsize = e[1]
635 635 uncompsize = e[2]
636 636 chainsize = 0
637 637
638 638 if generaldelta:
639 639 if e[3] == e[5]:
640 640 deltatype = 'p1'
641 641 elif e[3] == e[6]:
642 642 deltatype = 'p2'
643 643 elif e[3] == rev - 1:
644 644 deltatype = 'prev'
645 645 elif e[3] == rev:
646 646 deltatype = 'base'
647 647 else:
648 648 deltatype = 'other'
649 649 else:
650 650 if e[3] == rev:
651 651 deltatype = 'base'
652 652 else:
653 653 deltatype = 'prev'
654 654
655 655 chain = r._deltachain(rev)[0]
656 656 for iterrev in chain:
657 657 e = index[iterrev]
658 658 chainsize += e[1]
659 659
660 660 return compsize, uncompsize, deltatype, chain, chainsize
661 661
662 662 fm = ui.formatter('debugdeltachain', opts)
663 663
664 664 fm.plain(' rev chain# chainlen prev delta '
665 665 'size rawsize chainsize ratio lindist extradist '
666 666 'extraratio')
667 667 if withsparseread:
668 668 fm.plain(' readsize largestblk rddensity srchunks')
669 669 fm.plain('\n')
670 670
671 671 chainbases = {}
672 672 for rev in r:
673 673 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
674 674 chainbase = chain[0]
675 675 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
676 676 basestart = start(chainbase)
677 677 revstart = start(rev)
678 678 lineardist = revstart + comp - basestart
679 679 extradist = lineardist - chainsize
680 680 try:
681 681 prevrev = chain[-2]
682 682 except IndexError:
683 683 prevrev = -1
684 684
685 685 if uncomp != 0:
686 686 chainratio = float(chainsize) / float(uncomp)
687 687 else:
688 688 chainratio = chainsize
689 689
690 690 if chainsize != 0:
691 691 extraratio = float(extradist) / float(chainsize)
692 692 else:
693 693 extraratio = extradist
694 694
695 695 fm.startitem()
696 696 fm.write('rev chainid chainlen prevrev deltatype compsize '
697 697 'uncompsize chainsize chainratio lindist extradist '
698 698 'extraratio',
699 699 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
700 700 rev, chainid, len(chain), prevrev, deltatype, comp,
701 701 uncomp, chainsize, chainratio, lineardist, extradist,
702 702 extraratio,
703 703 rev=rev, chainid=chainid, chainlen=len(chain),
704 704 prevrev=prevrev, deltatype=deltatype, compsize=comp,
705 705 uncompsize=uncomp, chainsize=chainsize,
706 706 chainratio=chainratio, lindist=lineardist,
707 707 extradist=extradist, extraratio=extraratio)
708 708 if withsparseread:
709 709 readsize = 0
710 710 largestblock = 0
711 711 srchunks = 0
712 712
713 713 for revschunk in deltautil.slicechunk(r, chain):
714 714 srchunks += 1
715 715 blkend = start(revschunk[-1]) + length(revschunk[-1])
716 716 blksize = blkend - start(revschunk[0])
717 717
718 718 readsize += blksize
719 719 if largestblock < blksize:
720 720 largestblock = blksize
721 721
722 722 if readsize:
723 723 readdensity = float(chainsize) / float(readsize)
724 724 else:
725 725 readdensity = 1
726 726
727 727 fm.write('readsize largestblock readdensity srchunks',
728 728 ' %10d %10d %9.5f %8d',
729 729 readsize, largestblock, readdensity, srchunks,
730 730 readsize=readsize, largestblock=largestblock,
731 731 readdensity=readdensity, srchunks=srchunks)
732 732
733 733 fm.plain('\n')
734 734
735 735 fm.end()
736 736
737 737 @command('debugdirstate|debugstate',
738 738 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
739 739 ('', 'dates', True, _('display the saved mtime')),
740 740 ('', 'datesort', None, _('sort by saved mtime'))],
741 741 _('[OPTION]...'))
742 742 def debugstate(ui, repo, **opts):
743 743 """show the contents of the current dirstate"""
744 744
745 745 nodates = not opts[r'dates']
746 746 if opts.get(r'nodates') is not None:
747 747 nodates = True
748 748 datesort = opts.get(r'datesort')
749 749
750 750 if datesort:
751 751 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
752 752 else:
753 753 keyfunc = None # sort by filename
754 754 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
755 755 if ent[3] == -1:
756 756 timestr = 'unset '
757 757 elif nodates:
758 758 timestr = 'set '
759 759 else:
760 760 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
761 761 time.localtime(ent[3]))
762 762 timestr = encoding.strtolocal(timestr)
763 763 if ent[1] & 0o20000:
764 764 mode = 'lnk'
765 765 else:
766 766 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
767 767 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
768 768 for f in repo.dirstate.copies():
769 769 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
770 770
771 771 @command('debugdiscovery',
772 772 [('', 'old', None, _('use old-style discovery')),
773 773 ('', 'nonheads', None,
774 774 _('use old-style discovery with non-heads included')),
775 775 ('', 'rev', [], 'restrict discovery to this set of revs'),
776 776 ('', 'seed', '12323', 'specify the random seed used for discovery'),
777 777 ] + cmdutil.remoteopts,
778 778 _('[--rev REV] [OTHER]'))
779 779 def debugdiscovery(ui, repo, remoteurl="default", **opts):
780 780 """runs the changeset discovery protocol in isolation"""
781 781 opts = pycompat.byteskwargs(opts)
782 782 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
783 783 remote = hg.peer(repo, opts, remoteurl)
784 784 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
785 785
786 786 # make sure tests are repeatable
787 787 random.seed(int(opts['seed']))
788 788
789 789
790 790
791 791 if opts.get('old'):
792 792 def doit(pushedrevs, remoteheads, remote=remote):
793 793 if not util.safehasattr(remote, 'branches'):
794 794 # enable in-client legacy support
795 795 remote = localrepo.locallegacypeer(remote.local())
796 796 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
797 797 force=True)
798 798 common = set(common)
799 799 if not opts.get('nonheads'):
800 800 ui.write(("unpruned common: %s\n") %
801 801 " ".join(sorted(short(n) for n in common)))
802 802
803 803 clnode = repo.changelog.node
804 804 common = repo.revs('heads(::%ln)', common)
805 805 common = {clnode(r) for r in common}
806 806 return common, hds
807 807 else:
808 808 def doit(pushedrevs, remoteheads, remote=remote):
809 809 nodes = None
810 810 if pushedrevs:
811 811 revs = scmutil.revrange(repo, pushedrevs)
812 812 nodes = [repo[r].node() for r in revs]
813 813 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
814 814 ancestorsof=nodes)
815 815 return common, hds
816 816
817 817 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
818 818 localrevs = opts['rev']
819 819 with util.timedcm('debug-discovery') as t:
820 820 common, hds = doit(localrevs, remoterevs)
821 821
822 822 # compute all statistics
823 823 common = set(common)
824 824 rheads = set(hds)
825 825 lheads = set(repo.heads())
826 826
827 827 data = {}
828 828 data['elapsed'] = t.elapsed
829 829 data['nb-common'] = len(common)
830 830 data['nb-common-local'] = len(common & lheads)
831 831 data['nb-common-remote'] = len(common & rheads)
832 832 data['nb-common-both'] = len(common & rheads & lheads)
833 833 data['nb-local'] = len(lheads)
834 834 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
835 835 data['nb-remote'] = len(rheads)
836 836 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
837 837 data['nb-revs'] = len(repo.revs('all()'))
838 838 data['nb-revs-common'] = len(repo.revs('::%ln', common))
839 839 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
840 840
841 841 # display discovery summary
842 842 ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
843 843 ui.write(("heads summary:\n"))
844 844 ui.write((" total common heads: %(nb-common)9d\n") % data)
845 845 ui.write((" also local heads: %(nb-common-local)9d\n") % data)
846 846 ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
847 847 ui.write((" both: %(nb-common-both)9d\n") % data)
848 848 ui.write((" local heads: %(nb-local)9d\n") % data)
849 849 ui.write((" common: %(nb-common-local)9d\n") % data)
850 850 ui.write((" missing: %(nb-local-missing)9d\n") % data)
851 851 ui.write((" remote heads: %(nb-remote)9d\n") % data)
852 852 ui.write((" common: %(nb-common-remote)9d\n") % data)
853 853 ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
854 854 ui.write(("local changesets: %(nb-revs)9d\n") % data)
855 855 ui.write((" common: %(nb-revs-common)9d\n") % data)
856 856 ui.write((" missing: %(nb-revs-missing)9d\n") % data)
857 857
858 858 if ui.verbose:
859 859 ui.write(("common heads: %s\n") %
860 860 " ".join(sorted(short(n) for n in common)))
861 861
862 862 _chunksize = 4 << 10
863 863
864 864 @command('debugdownload',
865 865 [
866 866 ('o', 'output', '', _('path')),
867 867 ],
868 868 optionalrepo=True)
869 869 def debugdownload(ui, repo, url, output=None, **opts):
870 870 """download a resource using Mercurial logic and config
871 871 """
872 872 fh = urlmod.open(ui, url, output)
873 873
874 874 dest = ui
875 875 if output:
876 876 dest = open(output, "wb", _chunksize)
877 877 try:
878 878 data = fh.read(_chunksize)
879 879 while data:
880 880 dest.write(data)
881 881 data = fh.read(_chunksize)
882 882 finally:
883 883 if output:
884 884 dest.close()
885 885
886 886 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
887 887 def debugextensions(ui, repo, **opts):
888 888 '''show information about active extensions'''
889 889 opts = pycompat.byteskwargs(opts)
890 890 exts = extensions.extensions(ui)
891 891 hgver = util.version()
892 892 fm = ui.formatter('debugextensions', opts)
893 893 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
894 894 isinternal = extensions.ismoduleinternal(extmod)
895 895 extsource = pycompat.fsencode(extmod.__file__)
896 896 if isinternal:
897 897 exttestedwith = [] # never expose magic string to users
898 898 else:
899 899 exttestedwith = getattr(extmod, 'testedwith', '').split()
900 900 extbuglink = getattr(extmod, 'buglink', None)
901 901
902 902 fm.startitem()
903 903
904 904 if ui.quiet or ui.verbose:
905 905 fm.write('name', '%s\n', extname)
906 906 else:
907 907 fm.write('name', '%s', extname)
908 908 if isinternal or hgver in exttestedwith:
909 909 fm.plain('\n')
910 910 elif not exttestedwith:
911 911 fm.plain(_(' (untested!)\n'))
912 912 else:
913 913 lasttestedversion = exttestedwith[-1]
914 914 fm.plain(' (%s!)\n' % lasttestedversion)
915 915
916 916 fm.condwrite(ui.verbose and extsource, 'source',
917 917 _(' location: %s\n'), extsource or "")
918 918
919 919 if ui.verbose:
920 920 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
921 921 fm.data(bundled=isinternal)
922 922
923 923 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
924 924 _(' tested with: %s\n'),
925 925 fm.formatlist(exttestedwith, name='ver'))
926 926
927 927 fm.condwrite(ui.verbose and extbuglink, 'buglink',
928 928 _(' bug reporting: %s\n'), extbuglink or "")
929 929
930 930 fm.end()
931 931
932 932 @command('debugfileset',
933 933 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
934 934 ('', 'all-files', False,
935 935 _('test files from all revisions and working directory')),
936 936 ('s', 'show-matcher', None,
937 937 _('print internal representation of matcher')),
938 938 ('p', 'show-stage', [],
939 939 _('print parsed tree at the given stage'), _('NAME'))],
940 940 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
941 941 def debugfileset(ui, repo, expr, **opts):
942 942 '''parse and apply a fileset specification'''
943 943 from . import fileset
944 944 fileset.symbols # force import of fileset so we have predicates to optimize
945 945 opts = pycompat.byteskwargs(opts)
946 946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
947 947
948 948 stages = [
949 949 ('parsed', pycompat.identity),
950 950 ('analyzed', filesetlang.analyze),
951 951 ('optimized', filesetlang.optimize),
952 952 ]
953 953 stagenames = set(n for n, f in stages)
954 954
955 955 showalways = set()
956 956 if ui.verbose and not opts['show_stage']:
957 957 # show parsed tree by --verbose (deprecated)
958 958 showalways.add('parsed')
959 959 if opts['show_stage'] == ['all']:
960 960 showalways.update(stagenames)
961 961 else:
962 962 for n in opts['show_stage']:
963 963 if n not in stagenames:
964 964 raise error.Abort(_('invalid stage name: %s') % n)
965 965 showalways.update(opts['show_stage'])
966 966
967 967 tree = filesetlang.parse(expr)
968 968 for n, f in stages:
969 969 tree = f(tree)
970 970 if n in showalways:
971 971 if opts['show_stage'] or n != 'parsed':
972 972 ui.write(("* %s:\n") % n)
973 973 ui.write(filesetlang.prettyformat(tree), "\n")
974 974
975 975 files = set()
976 976 if opts['all_files']:
977 977 for r in repo:
978 978 c = repo[r]
979 979 files.update(c.files())
980 980 files.update(c.substate)
981 981 if opts['all_files'] or ctx.rev() is None:
982 982 wctx = repo[None]
983 983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
984 984 subrepos=list(wctx.substate),
985 985 unknown=True, ignored=True))
986 986 files.update(wctx.substate)
987 987 else:
988 988 files.update(ctx.files())
989 989 files.update(ctx.substate)
990 990
991 991 m = ctx.matchfileset(expr)
992 992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
993 993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
994 994 for f in sorted(files):
995 995 if not m(f):
996 996 continue
997 997 ui.write("%s\n" % f)
998 998
999 999 @command('debugformat',
1000 1000 [] + cmdutil.formatteropts)
1001 1001 def debugformat(ui, repo, **opts):
1002 1002 """display format information about the current repository
1003 1003
1004 1004 Use --verbose to get extra information about current config value and
1005 1005 Mercurial default."""
1006 1006 opts = pycompat.byteskwargs(opts)
1007 1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1008 1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1009 1009
1010 1010 def makeformatname(name):
1011 1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1012 1012
1013 1013 fm = ui.formatter('debugformat', opts)
1014 1014 if fm.isplain():
1015 1015 def formatvalue(value):
1016 1016 if util.safehasattr(value, 'startswith'):
1017 1017 return value
1018 1018 if value:
1019 1019 return 'yes'
1020 1020 else:
1021 1021 return 'no'
1022 1022 else:
1023 1023 formatvalue = pycompat.identity
1024 1024
1025 1025 fm.plain('format-variant')
1026 1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1027 1027 fm.plain(' repo')
1028 1028 if ui.verbose:
1029 1029 fm.plain(' config default')
1030 1030 fm.plain('\n')
1031 1031 for fv in upgrade.allformatvariant:
1032 1032 fm.startitem()
1033 1033 repovalue = fv.fromrepo(repo)
1034 1034 configvalue = fv.fromconfig(repo)
1035 1035
1036 1036 if repovalue != configvalue:
1037 1037 namelabel = 'formatvariant.name.mismatchconfig'
1038 1038 repolabel = 'formatvariant.repo.mismatchconfig'
1039 1039 elif repovalue != fv.default:
1040 1040 namelabel = 'formatvariant.name.mismatchdefault'
1041 1041 repolabel = 'formatvariant.repo.mismatchdefault'
1042 1042 else:
1043 1043 namelabel = 'formatvariant.name.uptodate'
1044 1044 repolabel = 'formatvariant.repo.uptodate'
1045 1045
1046 1046 fm.write('name', makeformatname(fv.name), fv.name,
1047 1047 label=namelabel)
1048 1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1049 1049 label=repolabel)
1050 1050 if fv.default != configvalue:
1051 1051 configlabel = 'formatvariant.config.special'
1052 1052 else:
1053 1053 configlabel = 'formatvariant.config.default'
1054 1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1055 1055 label=configlabel)
1056 1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1057 1057 label='formatvariant.default')
1058 1058 fm.plain('\n')
1059 1059 fm.end()
1060 1060
1061 1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1062 1062 def debugfsinfo(ui, path="."):
1063 1063 """show information detected about current filesystem"""
1064 1064 ui.write(('path: %s\n') % path)
1065 1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1066 1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1067 1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1068 1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1069 1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1070 1070 casesensitive = '(unknown)'
1071 1071 try:
1072 1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1073 1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1074 1074 except OSError:
1075 1075 pass
1076 1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1077 1077
1078 1078 @command('debuggetbundle',
1079 1079 [('H', 'head', [], _('id of head node'), _('ID')),
1080 1080 ('C', 'common', [], _('id of common node'), _('ID')),
1081 1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1082 1082 _('REPO FILE [-H|-C ID]...'),
1083 1083 norepo=True)
1084 1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1085 1085 """retrieves a bundle from a repo
1086 1086
1087 1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1088 1088 given file.
1089 1089 """
1090 1090 opts = pycompat.byteskwargs(opts)
1091 1091 repo = hg.peer(ui, opts, repopath)
1092 1092 if not repo.capable('getbundle'):
1093 1093 raise error.Abort("getbundle() not supported by target repository")
1094 1094 args = {}
1095 1095 if common:
1096 1096 args[r'common'] = [bin(s) for s in common]
1097 1097 if head:
1098 1098 args[r'heads'] = [bin(s) for s in head]
1099 1099 # TODO: get desired bundlecaps from command line.
1100 1100 args[r'bundlecaps'] = None
1101 1101 bundle = repo.getbundle('debug', **args)
1102 1102
1103 1103 bundletype = opts.get('type', 'bzip2').lower()
1104 1104 btypes = {'none': 'HG10UN',
1105 1105 'bzip2': 'HG10BZ',
1106 1106 'gzip': 'HG10GZ',
1107 1107 'bundle2': 'HG20'}
1108 1108 bundletype = btypes.get(bundletype)
1109 1109 if bundletype not in bundle2.bundletypes:
1110 1110 raise error.Abort(_('unknown bundle type specified with --type'))
1111 1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1112 1112
1113 1113 @command('debugignore', [], '[FILE]')
1114 1114 def debugignore(ui, repo, *files, **opts):
1115 1115 """display the combined ignore pattern and information about ignored files
1116 1116
1117 1117 With no argument display the combined ignore pattern.
1118 1118
1119 1119 Given space-separated file names, shows whether each file is ignored and,
1120 1120 if so, shows the ignore rule (file and line number) that matched it.
1121 1121 """
1122 1122 ignore = repo.dirstate._ignore
1123 1123 if not files:
1124 1124 # Show all the patterns
1125 1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1126 1126 else:
1127 1127 m = scmutil.match(repo[None], pats=files)
1128 1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1129 1129 for f in m.files():
1130 1130 nf = util.normpath(f)
1131 1131 ignored = None
1132 1132 ignoredata = None
1133 1133 if nf != '.':
1134 1134 if ignore(nf):
1135 1135 ignored = nf
1136 1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1137 1137 else:
1138 1138 for p in util.finddirs(nf):
1139 1139 if ignore(p):
1140 1140 ignored = p
1141 1141 ignoredata = repo.dirstate._ignorefileandline(p)
1142 1142 break
1143 1143 if ignored:
1144 1144 if ignored == nf:
1145 1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1146 1146 else:
1147 1147 ui.write(_("%s is ignored because of "
1148 1148 "containing directory %s\n")
1149 1149 % (uipathfn(f), ignored))
1150 1150 ignorefile, lineno, line = ignoredata
1151 1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1152 1152 % (ignorefile, lineno, line))
1153 1153 else:
1154 1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1155 1155
1156 1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1157 1157 _('-c|-m|FILE'))
1158 1158 def debugindex(ui, repo, file_=None, **opts):
1159 1159 """dump index data for a storage primitive"""
1160 1160 opts = pycompat.byteskwargs(opts)
1161 1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1162 1162
1163 1163 if ui.debugflag:
1164 1164 shortfn = hex
1165 1165 else:
1166 1166 shortfn = short
1167 1167
1168 1168 idlen = 12
1169 1169 for i in store:
1170 1170 idlen = len(shortfn(store.node(i)))
1171 1171 break
1172 1172
1173 1173 fm = ui.formatter('debugindex', opts)
1174 1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1175 1175 b'nodeid'.ljust(idlen),
1176 1176 b'p1'.ljust(idlen)))
1177 1177
1178 1178 for rev in store:
1179 1179 node = store.node(rev)
1180 1180 parents = store.parents(node)
1181 1181
1182 1182 fm.startitem()
1183 1183 fm.write(b'rev', b'%6d ', rev)
1184 1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1185 1185 fm.write(b'node', '%s ', shortfn(node))
1186 1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1187 1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1188 1188 fm.plain(b'\n')
1189 1189
1190 1190 fm.end()
1191 1191
1192 1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1193 1193 _('-c|-m|FILE'), optionalrepo=True)
1194 1194 def debugindexdot(ui, repo, file_=None, **opts):
1195 1195 """dump an index DAG as a graphviz dot file"""
1196 1196 opts = pycompat.byteskwargs(opts)
1197 1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1198 1198 ui.write(("digraph G {\n"))
1199 1199 for i in r:
1200 1200 node = r.node(i)
1201 1201 pp = r.parents(node)
1202 1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1203 1203 if pp[1] != nullid:
1204 1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1205 1205 ui.write("}\n")
1206 1206
1207 1207 @command('debugindexstats', [])
1208 1208 def debugindexstats(ui, repo):
1209 1209 """show stats related to the changelog index"""
1210 1210 repo.changelog.shortest(nullid, 1)
1211 1211 index = repo.changelog.index
1212 1212 if not util.safehasattr(index, 'stats'):
1213 1213 raise error.Abort(_('debugindexstats only works with native code'))
1214 1214 for k, v in sorted(index.stats().items()):
1215 1215 ui.write('%s: %d\n' % (k, v))
1216 1216
1217 1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1218 1218 def debuginstall(ui, **opts):
1219 1219 '''test Mercurial installation
1220 1220
1221 1221 Returns 0 on success.
1222 1222 '''
1223 1223 opts = pycompat.byteskwargs(opts)
1224 1224
1225 1225 problems = 0
1226 1226
1227 1227 fm = ui.formatter('debuginstall', opts)
1228 1228 fm.startitem()
1229 1229
1230 1230 # encoding
1231 1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1232 1232 err = None
1233 1233 try:
1234 1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1235 1235 except LookupError as inst:
1236 1236 err = stringutil.forcebytestr(inst)
1237 1237 problems += 1
1238 1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1239 1239 " (check that your locale is properly set)\n"), err)
1240 1240
1241 1241 # Python
1242 1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1243 1243 pycompat.sysexecutable or _("unknown"))
1244 1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1245 1245 ("%d.%d.%d" % sys.version_info[:3]))
1246 1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1247 1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1248 1248
1249 1249 security = set(sslutil.supportedprotocols)
1250 1250 if sslutil.hassni:
1251 1251 security.add('sni')
1252 1252
1253 1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1254 1254 fm.formatlist(sorted(security), name='protocol',
1255 1255 fmt='%s', sep=','))
1256 1256
1257 1257 # These are warnings, not errors. So don't increment problem count. This
1258 1258 # may change in the future.
1259 1259 if 'tls1.2' not in security:
1260 1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1261 1261 'network connections lack modern security\n'))
1262 1262 if 'sni' not in security:
1263 1263 fm.plain(_(' SNI not supported by Python install; may have '
1264 1264 'connectivity issues with some servers\n'))
1265 1265
1266 1266 # TODO print CA cert info
1267 1267
1268 1268 # hg version
1269 1269 hgver = util.version()
1270 1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1271 1271 hgver.split('+')[0])
1272 1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1273 1273 '+'.join(hgver.split('+')[1:]))
1274 1274
1275 1275 # compiled modules
1276 1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1277 1277 policy.policy)
1278 1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1279 1279 os.path.dirname(pycompat.fsencode(__file__)))
1280 1280
1281 1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1282 1282 rustext = rustandc # for now, that's the only case
1283 1283 cext = policy.policy in ('c', 'allow') or rustandc
1284 1284 nopure = cext or rustext
1285 1285 if nopure:
1286 1286 err = None
1287 1287 try:
1288 1288 if cext:
1289 1289 from .cext import (
1290 1290 base85,
1291 1291 bdiff,
1292 1292 mpatch,
1293 1293 osutil,
1294 1294 )
1295 1295 # quiet pyflakes
1296 1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1297 1297 if rustext:
1298 1298 from .rustext import (
1299 1299 ancestor,
1300 1300 dirstate,
1301 1301 )
1302 1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1303 1303 except Exception as inst:
1304 1304 err = stringutil.forcebytestr(inst)
1305 1305 problems += 1
1306 1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1307 1307
1308 1308 compengines = util.compengines._engines.values()
1309 1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1310 1310 fm.formatlist(sorted(e.name() for e in compengines),
1311 1311 name='compengine', fmt='%s', sep=', '))
1312 1312 fm.write('compenginesavail', _('checking available compression engines '
1313 1313 '(%s)\n'),
1314 1314 fm.formatlist(sorted(e.name() for e in compengines
1315 1315 if e.available()),
1316 1316 name='compengine', fmt='%s', sep=', '))
1317 1317 wirecompengines = compression.compengines.supportedwireengines(
1318 1318 compression.SERVERROLE)
1319 1319 fm.write('compenginesserver', _('checking available compression engines '
1320 1320 'for wire protocol (%s)\n'),
1321 1321 fm.formatlist([e.name() for e in wirecompengines
1322 1322 if e.wireprotosupport()],
1323 1323 name='compengine', fmt='%s', sep=', '))
1324 1324 re2 = 'missing'
1325 1325 if util._re2:
1326 1326 re2 = 'available'
1327 1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1328 1328 fm.data(re2=bool(util._re2))
1329 1329
1330 1330 # templates
1331 1331 p = templater.templatepaths()
1332 1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1333 1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1334 1334 if p:
1335 1335 m = templater.templatepath("map-cmdline.default")
1336 1336 if m:
1337 1337 # template found, check if it is working
1338 1338 err = None
1339 1339 try:
1340 1340 templater.templater.frommapfile(m)
1341 1341 except Exception as inst:
1342 1342 err = stringutil.forcebytestr(inst)
1343 1343 p = None
1344 1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1345 1345 else:
1346 1346 p = None
1347 1347 fm.condwrite(p, 'defaulttemplate',
1348 1348 _("checking default template (%s)\n"), m)
1349 1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1350 1350 _(" template '%s' not found\n"), "default")
1351 1351 if not p:
1352 1352 problems += 1
1353 1353 fm.condwrite(not p, '',
1354 1354 _(" (templates seem to have been installed incorrectly)\n"))
1355 1355
1356 1356 # editor
1357 1357 editor = ui.geteditor()
1358 1358 editor = util.expandpath(editor)
1359 1359 editorbin = procutil.shellsplit(editor)[0]
1360 1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1361 1361 cmdpath = procutil.findexe(editorbin)
1362 1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1363 1363 _(" No commit editor set and can't find %s in PATH\n"
1364 1364 " (specify a commit editor in your configuration"
1365 1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1366 1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1367 1367 _(" Can't find editor '%s' in PATH\n"
1368 1368 " (specify a commit editor in your configuration"
1369 1369 " file)\n"), not cmdpath and editorbin)
1370 1370 if not cmdpath and editor != 'vi':
1371 1371 problems += 1
1372 1372
1373 1373 # check username
1374 1374 username = None
1375 1375 err = None
1376 1376 try:
1377 1377 username = ui.username()
1378 1378 except error.Abort as e:
1379 1379 err = stringutil.forcebytestr(e)
1380 1380 problems += 1
1381 1381
1382 1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1383 1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1384 1384 " (specify a username in your configuration file)\n"), err)
1385 1385
1386 for name, mod in extensions.extensions():
1387 handler = getattr(mod, 'debuginstall', None)
1388 if handler is not None:
1389 problems += handler(ui, fm)
1390
1386 1391 fm.condwrite(not problems, '',
1387 1392 _("no problems detected\n"))
1388 1393 if not problems:
1389 1394 fm.data(problems=problems)
1390 1395 fm.condwrite(problems, 'problems',
1391 1396 _("%d problems detected,"
1392 1397 " please check your install!\n"), problems)
1393 1398 fm.end()
1394 1399
1395 1400 return problems
1396 1401
1397 1402 @command('debugknown', [], _('REPO ID...'), norepo=True)
1398 1403 def debugknown(ui, repopath, *ids, **opts):
1399 1404 """test whether node ids are known to a repo
1400 1405
1401 1406 Every ID must be a full-length hex node id string. Returns a list of 0s
1402 1407 and 1s indicating unknown/known.
1403 1408 """
1404 1409 opts = pycompat.byteskwargs(opts)
1405 1410 repo = hg.peer(ui, opts, repopath)
1406 1411 if not repo.capable('known'):
1407 1412 raise error.Abort("known() not supported by target repository")
1408 1413 flags = repo.known([bin(s) for s in ids])
1409 1414 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1410 1415
1411 1416 @command('debuglabelcomplete', [], _('LABEL...'))
1412 1417 def debuglabelcomplete(ui, repo, *args):
1413 1418 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1414 1419 debugnamecomplete(ui, repo, *args)
1415 1420
1416 1421 @command('debuglocks',
1417 1422 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1418 1423 ('W', 'force-wlock', None,
1419 1424 _('free the working state lock (DANGEROUS)')),
1420 1425 ('s', 'set-lock', None, _('set the store lock until stopped')),
1421 1426 ('S', 'set-wlock', None,
1422 1427 _('set the working state lock until stopped'))],
1423 1428 _('[OPTION]...'))
1424 1429 def debuglocks(ui, repo, **opts):
1425 1430 """show or modify state of locks
1426 1431
1427 1432 By default, this command will show which locks are held. This
1428 1433 includes the user and process holding the lock, the amount of time
1429 1434 the lock has been held, and the machine name where the process is
1430 1435 running if it's not local.
1431 1436
1432 1437 Locks protect the integrity of Mercurial's data, so should be
1433 1438 treated with care. System crashes or other interruptions may cause
1434 1439 locks to not be properly released, though Mercurial will usually
1435 1440 detect and remove such stale locks automatically.
1436 1441
1437 1442 However, detecting stale locks may not always be possible (for
1438 1443 instance, on a shared filesystem). Removing locks may also be
1439 1444 blocked by filesystem permissions.
1440 1445
1441 1446 Setting a lock will prevent other commands from changing the data.
1442 1447 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1443 1448 The set locks are removed when the command exits.
1444 1449
1445 1450 Returns 0 if no locks are held.
1446 1451
1447 1452 """
1448 1453
1449 1454 if opts.get(r'force_lock'):
1450 1455 repo.svfs.unlink('lock')
1451 1456 if opts.get(r'force_wlock'):
1452 1457 repo.vfs.unlink('wlock')
1453 1458 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1454 1459 return 0
1455 1460
1456 1461 locks = []
1457 1462 try:
1458 1463 if opts.get(r'set_wlock'):
1459 1464 try:
1460 1465 locks.append(repo.wlock(False))
1461 1466 except error.LockHeld:
1462 1467 raise error.Abort(_('wlock is already held'))
1463 1468 if opts.get(r'set_lock'):
1464 1469 try:
1465 1470 locks.append(repo.lock(False))
1466 1471 except error.LockHeld:
1467 1472 raise error.Abort(_('lock is already held'))
1468 1473 if len(locks):
1469 1474 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1470 1475 return 0
1471 1476 finally:
1472 1477 release(*locks)
1473 1478
1474 1479 now = time.time()
1475 1480 held = 0
1476 1481
1477 1482 def report(vfs, name, method):
1478 1483 # this causes stale locks to get reaped for more accurate reporting
1479 1484 try:
1480 1485 l = method(False)
1481 1486 except error.LockHeld:
1482 1487 l = None
1483 1488
1484 1489 if l:
1485 1490 l.release()
1486 1491 else:
1487 1492 try:
1488 1493 st = vfs.lstat(name)
1489 1494 age = now - st[stat.ST_MTIME]
1490 1495 user = util.username(st.st_uid)
1491 1496 locker = vfs.readlock(name)
1492 1497 if ":" in locker:
1493 1498 host, pid = locker.split(':')
1494 1499 if host == socket.gethostname():
1495 1500 locker = 'user %s, process %s' % (user or b'None', pid)
1496 1501 else:
1497 1502 locker = ('user %s, process %s, host %s'
1498 1503 % (user or b'None', pid, host))
1499 1504 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1500 1505 return 1
1501 1506 except OSError as e:
1502 1507 if e.errno != errno.ENOENT:
1503 1508 raise
1504 1509
1505 1510 ui.write(("%-6s free\n") % (name + ":"))
1506 1511 return 0
1507 1512
1508 1513 held += report(repo.svfs, "lock", repo.lock)
1509 1514 held += report(repo.vfs, "wlock", repo.wlock)
1510 1515
1511 1516 return held
1512 1517
1513 1518 @command('debugmanifestfulltextcache', [
1514 1519 ('', 'clear', False, _('clear the cache')),
1515 1520 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1516 1521 _('NODE'))
1517 1522 ], '')
1518 1523 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1519 1524 """show, clear or amend the contents of the manifest fulltext cache"""
1520 1525
1521 1526 def getcache():
1522 1527 r = repo.manifestlog.getstorage(b'')
1523 1528 try:
1524 1529 return r._fulltextcache
1525 1530 except AttributeError:
1526 1531 msg = _("Current revlog implementation doesn't appear to have a "
1527 1532 "manifest fulltext cache\n")
1528 1533 raise error.Abort(msg)
1529 1534
1530 1535 if opts.get(r'clear'):
1531 1536 with repo.wlock():
1532 1537 cache = getcache()
1533 1538 cache.clear(clear_persisted_data=True)
1534 1539 return
1535 1540
1536 1541 if add:
1537 1542 with repo.wlock():
1538 1543 m = repo.manifestlog
1539 1544 store = m.getstorage(b'')
1540 1545 for n in add:
1541 1546 try:
1542 1547 manifest = m[store.lookup(n)]
1543 1548 except error.LookupError as e:
1544 1549 raise error.Abort(e, hint="Check your manifest node id")
1545 1550 manifest.read() # stores revision in cache too
1546 1551 return
1547 1552
1548 1553 cache = getcache()
1549 1554 if not len(cache):
1550 1555 ui.write(_('cache empty\n'))
1551 1556 else:
1552 1557 ui.write(
1553 1558 _('cache contains %d manifest entries, in order of most to '
1554 1559 'least recent:\n') % (len(cache),))
1555 1560 totalsize = 0
1556 1561 for nodeid in cache:
1557 1562 # Use cache.peek to avoid updating the LRU order
1558 1563 data = cache.peek(nodeid)
1559 1564 size = len(data)
1560 1565 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1561 1566 ui.write(_('id: %s, size %s\n') % (
1562 1567 hex(nodeid), util.bytecount(size)))
1563 1568 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1564 1569 ui.write(
1565 1570 _('total cache data size %s, on-disk %s\n') % (
1566 1571 util.bytecount(totalsize), util.bytecount(ondisk))
1567 1572 )
1568 1573
1569 1574 @command('debugmergestate', [], '')
1570 1575 def debugmergestate(ui, repo, *args):
1571 1576 """print merge state
1572 1577
1573 1578 Use --verbose to print out information about whether v1 or v2 merge state
1574 1579 was chosen."""
1575 1580 def _hashornull(h):
1576 1581 if h == nullhex:
1577 1582 return 'null'
1578 1583 else:
1579 1584 return h
1580 1585
1581 1586 def printrecords(version):
1582 1587 ui.write(('* version %d records\n') % version)
1583 1588 if version == 1:
1584 1589 records = v1records
1585 1590 else:
1586 1591 records = v2records
1587 1592
1588 1593 for rtype, record in records:
1589 1594 # pretty print some record types
1590 1595 if rtype == 'L':
1591 1596 ui.write(('local: %s\n') % record)
1592 1597 elif rtype == 'O':
1593 1598 ui.write(('other: %s\n') % record)
1594 1599 elif rtype == 'm':
1595 1600 driver, mdstate = record.split('\0', 1)
1596 1601 ui.write(('merge driver: %s (state "%s")\n')
1597 1602 % (driver, mdstate))
1598 1603 elif rtype in 'FDC':
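# 'F', 'D' and 'C' records carry per-file merge state; each record is a
# NUL-separated list of fields, unpacked below.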
1599 1604 r = record.split('\0')
1600 1605 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1601 1606 if version == 1:
1602 1607 onode = 'not stored in v1 format'
1603 1608 flags = r[7]
1604 1609 else:
1605 1610 onode, flags = r[7:9]
1606 1611 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1607 1612 % (f, rtype, state, _hashornull(hash)))
1608 1613 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1609 1614 ui.write((' ancestor path: %s (node %s)\n')
1610 1615 % (afile, _hashornull(anode)))
1611 1616 ui.write((' other path: %s (node %s)\n')
1612 1617 % (ofile, _hashornull(onode)))
1613 1618 elif rtype == 'f':
1614 1619 filename, rawextras = record.split('\0', 1)
1615 1620 extras = rawextras.split('\0')
1616 1621 i = 0
1617 1622 extrastrings = []
1618 1623 while i < len(extras):
1619 1624 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1620 1625 i += 2
1621 1626
1622 1627 ui.write(('file extras: %s (%s)\n')
1623 1628 % (filename, ', '.join(extrastrings)))
1624 1629 elif rtype == 'l':
1625 1630 labels = record.split('\0', 2)
1626 1631 labels = [l for l in labels if len(l) > 0]
1627 1632 ui.write(('labels:\n'))
1628 1633 ui.write((' local: %s\n' % labels[0]))
1629 1634 ui.write((' other: %s\n' % labels[1]))
1630 1635 if len(labels) > 2:
1631 1636 ui.write((' base: %s\n' % labels[2]))
1632 1637 else:
1633 1638 ui.write(('unrecognized entry: %s\t%s\n')
1634 1639 % (rtype, record.replace('\0', '\t')))
1635 1640
1636 1641 # Avoid mergestate.read() since it may raise an exception for unsupported
1637 1642 # merge state records. We shouldn't be doing this, but this is OK since this
1638 1643 # command is pretty low-level.
1639 1644 ms = mergemod.mergestate(repo)
1640 1645
1641 1646 # sort so that reasonable information is on top
1642 1647 v1records = ms._readrecordsv1()
1643 1648 v2records = ms._readrecordsv2()
1644 1649 order = 'LOml'
1645 1650 def key(r):
1646 1651 idx = order.find(r[0])
1647 1652 if idx == -1:
1648 1653 return (1, r[1])
1649 1654 else:
1650 1655 return (0, idx)
1651 1656 v1records.sort(key=key)
1652 1657 v2records.sort(key=key)
1653 1658
1654 1659 if not v1records and not v2records:
1655 1660 ui.write(('no merge state found\n'))
1656 1661 elif not v2records:
1657 1662 ui.note(('no version 2 merge state\n'))
1658 1663 printrecords(1)
1659 1664 elif ms._v1v2match(v1records, v2records):
1660 1665 ui.note(('v1 and v2 states match: using v2\n'))
1661 1666 printrecords(2)
1662 1667 else:
1663 1668 ui.note(('v1 and v2 states mismatch: using v1\n'))
1664 1669 printrecords(1)
1665 1670 if ui.verbose:
1666 1671 printrecords(2)
1667 1672
1668 1673 @command('debugnamecomplete', [], _('NAME...'))
1669 1674 def debugnamecomplete(ui, repo, *args):
1670 1675 '''complete "names" - tags, open branch names, bookmark names'''
1671 1676
1672 1677 names = set()
1673 1678 # since we previously only listed open branches, we will handle that
1674 1679 # specially (after this for loop)
1675 1680 for name, ns in repo.names.iteritems():
1676 1681 if name != 'branches':
1677 1682 names.update(ns.listnames(repo))
1678 1683 names.update(tag for (tag, heads, tip, closed)
1679 1684 in repo.branchmap().iterbranches() if not closed)
1680 1685 completions = set()
1681 1686 if not args:
1682 1687 args = ['']
1683 1688 for a in args:
1684 1689 completions.update(n for n in names if n.startswith(a))
1685 1690 ui.write('\n'.join(sorted(completions)))
1686 1691 ui.write('\n')
1687 1692
1688 1693 @command('debugobsolete',
1689 1694 [('', 'flags', 0, _('markers flag')),
1690 1695 ('', 'record-parents', False,
1691 1696 _('record parent information for the precursor')),
1692 1697 ('r', 'rev', [], _('display markers relevant to REV')),
1693 1698 ('', 'exclusive', False, _('restrict display to markers only '
1694 1699 'relevant to REV')),
1695 1700 ('', 'index', False, _('display index of the marker')),
1696 1701 ('', 'delete', [], _('delete markers specified by indices')),
1697 1702 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1698 1703 _('[OBSOLETED [REPLACEMENT ...]]'))
1699 1704 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1700 1705 """create arbitrary obsolete marker
1701 1706
1702 1707 With no arguments, displays the list of obsolescence markers."""
1703 1708
1704 1709 opts = pycompat.byteskwargs(opts)
1705 1710
1706 1711 def parsenodeid(s):
1707 1712 try:
1708 1713 # We do not use revsingle/revrange functions here to accept
1709 1714 # arbitrary node identifiers, possibly not present in the
1710 1715 # local repository.
1711 1716 n = bin(s)
1712 1717 if len(n) != len(nullid):
1713 1718 raise TypeError()
1714 1719 return n
1715 1720 except TypeError:
1716 1721 raise error.Abort('changeset references must be full hexadecimal '
1717 1722 'node identifiers')
1718 1723
1719 1724 if opts.get('delete'):
1720 1725 indices = []
1721 1726 for v in opts.get('delete'):
1722 1727 try:
1723 1728 indices.append(int(v))
1724 1729 except ValueError:
1725 1730 raise error.Abort(_('invalid index value: %r') % v,
1726 1731 hint=_('use integers for indices'))
1727 1732
1728 1733 if repo.currenttransaction():
1729 1734 raise error.Abort(_('cannot delete obsmarkers in the middle '
1730 1735 'of a transaction.'))
1731 1736
1732 1737 with repo.lock():
1733 1738 n = repair.deleteobsmarkers(repo.obsstore, indices)
1734 1739 ui.write(_('deleted %i obsolescence markers\n') % n)
1735 1740
1736 1741 return
1737 1742
1738 1743 if precursor is not None:
1739 1744 if opts['rev']:
1740 1745 raise error.Abort('cannot select revision when creating marker')
1741 1746 metadata = {}
1742 1747 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1743 1748 succs = tuple(parsenodeid(succ) for succ in successors)
1744 1749 l = repo.lock()
1745 1750 try:
1746 1751 tr = repo.transaction('debugobsolete')
1747 1752 try:
1748 1753 date = opts.get('date')
1749 1754 if date:
1750 1755 date = dateutil.parsedate(date)
1751 1756 else:
1752 1757 date = None
1753 1758 prec = parsenodeid(precursor)
1754 1759 parents = None
1755 1760 if opts['record_parents']:
1756 1761 if prec not in repo.unfiltered():
1757 1762 raise error.Abort('cannot use --record-parents on '
1758 1763 'unknown changesets')
1759 1764 parents = repo.unfiltered()[prec].parents()
1760 1765 parents = tuple(p.node() for p in parents)
1761 1766 repo.obsstore.create(tr, prec, succs, opts['flags'],
1762 1767 parents=parents, date=date,
1763 1768 metadata=metadata, ui=ui)
1764 1769 tr.close()
1765 1770 except ValueError as exc:
1766 1771 raise error.Abort(_('bad obsmarker input: %s') %
1767 1772 pycompat.bytestr(exc))
1768 1773 finally:
1769 1774 tr.release()
1770 1775 finally:
1771 1776 l.release()
1772 1777 else:
1773 1778 if opts['rev']:
1774 1779 revs = scmutil.revrange(repo, opts['rev'])
1775 1780 nodes = [repo[r].node() for r in revs]
1776 1781 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1777 1782 exclusive=opts['exclusive']))
1778 1783 markers.sort(key=lambda x: x._data)
1779 1784 else:
1780 1785 markers = obsutil.getmarkers(repo)
1781 1786
1782 1787 markerstoiter = markers
1783 1788 isrelevant = lambda m: True
1784 1789 if opts.get('rev') and opts.get('index'):
1785 1790 markerstoiter = obsutil.getmarkers(repo)
1786 1791 markerset = set(markers)
1787 1792 isrelevant = lambda m: m in markerset
1788 1793
1789 1794 fm = ui.formatter('debugobsolete', opts)
1790 1795 for i, m in enumerate(markerstoiter):
1791 1796 if not isrelevant(m):
1792 1797 # marker can be irrelevant when we're iterating over a set
1793 1798 # of markers (markerstoiter) which is bigger than the set
1794 1799 # of markers we want to display (markers)
1795 1800 # this can happen if both --index and --rev options are
1796 1801 # provided and thus we need to iterate over all of the markers
1797 1802 # to get the correct indices, but only display the ones that
1798 1803 # are relevant to --rev value
1799 1804 continue
1800 1805 fm.startitem()
1801 1806 ind = i if opts.get('index') else None
1802 1807 cmdutil.showmarker(fm, m, index=ind)
1803 1808 fm.end()
1804 1809
1805 1810 @command('debugp1copies',
1806 1811 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1807 1812 _('[-r REV]'))
1808 1813 def debugp1copies(ui, repo, **opts):
1809 1814 """dump copy information compared to p1"""
1810 1815
1811 1816 opts = pycompat.byteskwargs(opts)
1812 1817 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1813 1818 for dst, src in ctx.p1copies().items():
1814 1819 ui.write('%s -> %s\n' % (src, dst))
1815 1820
1816 1821 @command('debugp2copies',
1817 1822 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1818 1823 _('[-r REV]'))
1819 1824 def debugp2copies(ui, repo, **opts):
1820 1825 """dump copy information compared to p2"""
1821 1826
1822 1827 opts = pycompat.byteskwargs(opts)
1823 1828 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1824 1829 for dst, src in ctx.p2copies().items():
1825 1830 ui.write('%s -> %s\n' % (src, dst))
1826 1831
1827 1832 @command('debugpathcomplete',
1828 1833 [('f', 'full', None, _('complete an entire path')),
1829 1834 ('n', 'normal', None, _('show only normal files')),
1830 1835 ('a', 'added', None, _('show only added files')),
1831 1836 ('r', 'removed', None, _('show only removed files'))],
1832 1837 _('FILESPEC...'))
1833 1838 def debugpathcomplete(ui, repo, *specs, **opts):
1834 1839 '''complete part or all of a tracked path
1835 1840
1836 1841 This command supports shells that offer path name completion. It
1837 1842 currently completes only files already known to the dirstate.
1838 1843
1839 1844 Completion extends only to the next path segment unless
1840 1845 --full is specified, in which case entire paths are used.'''
1841 1846
1842 1847 def complete(path, acceptable):
1843 1848 dirstate = repo.dirstate
1844 1849 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1845 1850 rootdir = repo.root + pycompat.ossep
1846 1851 if spec != repo.root and not spec.startswith(rootdir):
1847 1852 return [], []
1848 1853 if os.path.isdir(spec):
1849 1854 spec += '/'
1850 1855 spec = spec[len(rootdir):]
1851 1856 fixpaths = pycompat.ossep != '/'
1852 1857 if fixpaths:
1853 1858 spec = spec.replace(pycompat.ossep, '/')
1854 1859 speclen = len(spec)
1855 1860 fullpaths = opts[r'full']
1856 1861 files, dirs = set(), set()
1857 1862 adddir, addfile = dirs.add, files.add
1858 1863 for f, st in dirstate.iteritems():
1859 1864 if f.startswith(spec) and st[0] in acceptable:
1860 1865 if fixpaths:
1861 1866 f = f.replace('/', pycompat.ossep)
1862 1867 if fullpaths:
1863 1868 addfile(f)
1864 1869 continue
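# without --full, truncate at the next path separator so completion
# only extends by a single path segment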
1865 1870 s = f.find(pycompat.ossep, speclen)
1866 1871 if s >= 0:
1867 1872 adddir(f[:s])
1868 1873 else:
1869 1874 addfile(f)
1870 1875 return files, dirs
1871 1876
1872 1877 acceptable = ''
1873 1878 if opts[r'normal']:
1874 1879 acceptable += 'nm'
1875 1880 if opts[r'added']:
1876 1881 acceptable += 'a'
1877 1882 if opts[r'removed']:
1878 1883 acceptable += 'r'
1879 1884 cwd = repo.getcwd()
1880 1885 if not specs:
1881 1886 specs = ['.']
1882 1887
1883 1888 files, dirs = set(), set()
1884 1889 for spec in specs:
1885 1890 f, d = complete(spec, acceptable or 'nmar')
1886 1891 files.update(f)
1887 1892 dirs.update(d)
1888 1893 files.update(dirs)
1889 1894 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1890 1895 ui.write('\n')
1891 1896
1892 1897 @command('debugpathcopies',
1893 1898 cmdutil.walkopts,
1894 1899 'hg debugpathcopies REV1 REV2 [FILE]',
1895 1900 inferrepo=True)
1896 1901 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1897 1902 """show copies between two revisions"""
1898 1903 ctx1 = scmutil.revsingle(repo, rev1)
1899 1904 ctx2 = scmutil.revsingle(repo, rev2)
1900 1905 m = scmutil.match(ctx1, pats, opts)
1901 1906 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1902 1907 ui.write('%s -> %s\n' % (src, dst))
1903 1908
1904 1909 @command('debugpeer', [], _('PATH'), norepo=True)
1905 1910 def debugpeer(ui, path):
1906 1911 """establish a connection to a peer repository"""
1907 1912 # Always enable peer request logging. Requires --debug to display
1908 1913 # though.
1909 1914 overrides = {
1910 1915 ('devel', 'debug.peer-request'): True,
1911 1916 }
1912 1917
1913 1918 with ui.configoverride(overrides):
1914 1919 peer = hg.peer(ui, {}, path)
1915 1920
1916 1921 local = peer.local() is not None
1917 1922 canpush = peer.canpush()
1918 1923
1919 1924 ui.write(_('url: %s\n') % peer.url())
1920 1925 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1921 1926 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1922 1927
1923 1928 @command('debugpickmergetool',
1924 1929 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1925 1930 ('', 'changedelete', None, _('emulate merging change and delete')),
1926 1931 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1927 1932 _('[PATTERN]...'),
1928 1933 inferrepo=True)
1929 1934 def debugpickmergetool(ui, repo, *pats, **opts):
1930 1935 """examine which merge tool is chosen for specified file
1931 1936
1932 1937 As described in :hg:`help merge-tools`, Mercurial examines
1933 1938 configurations below in this order to decide which merge tool is
1934 1939 chosen for the specified file.
1935 1940
1936 1941 1. ``--tool`` option
1937 1942 2. ``HGMERGE`` environment variable
1938 1943 3. configurations in ``merge-patterns`` section
1939 1944 4. configuration of ``ui.merge``
1940 1945 5. configurations in ``merge-tools`` section
1941 1946 6. ``hgmerge`` tool (for historical reasons only)
1942 1947 7. default tool for fallback (``:merge`` or ``:prompt``)
1943 1948
1944 1949 This command writes out the examination result in the style below::
1945 1950
1946 1951 FILE = MERGETOOL
1947 1952
1948 1953 By default, all files known in the first parent context of the
1949 1954 working directory are examined. Use file patterns and/or -I/-X
1950 1955 options to limit target files. -r/--rev is also useful to examine
1951 1956 files in another context without actually updating to it.
1952 1957
1953 1958 With --debug, this command also shows warning messages emitted while
1954 1959 matching against ``merge-patterns`` and so on. It is recommended to
1955 1960 use this option with explicit file patterns and/or -I/-X options,
1956 1961 because this option increases the amount of output per file according
1957 1962 to the configurations in hgrc.
1958 1963
1959 1964 With -v/--verbose, this command first shows the configurations below
1960 1965 (only those that are actually specified).
1961 1966
1962 1967 - ``--tool`` option
1963 1968 - ``HGMERGE`` environment variable
1964 1969 - configuration of ``ui.merge``
1965 1970
1966 1971 If a merge tool is chosen before matching against
1967 1972 ``merge-patterns``, this command can't show any helpful
1968 1973 information, even with --debug. In such cases, the information above
1969 1974 is useful for understanding why a merge tool was chosen.
1970 1975 """
1971 1976 opts = pycompat.byteskwargs(opts)
1972 1977 overrides = {}
1973 1978 if opts['tool']:
1974 1979 overrides[('ui', 'forcemerge')] = opts['tool']
1975 1980 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1976 1981
1977 1982 with ui.configoverride(overrides, 'debugmergepatterns'):
1978 1983 hgmerge = encoding.environ.get("HGMERGE")
1979 1984 if hgmerge is not None:
1980 1985 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1981 1986 uimerge = ui.config("ui", "merge")
1982 1987 if uimerge:
1983 1988 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1984 1989
1985 1990 ctx = scmutil.revsingle(repo, opts.get('rev'))
1986 1991 m = scmutil.match(ctx, pats, opts)
1987 1992 changedelete = opts['changedelete']
1988 1993 for path in ctx.walk(m):
1989 1994 fctx = ctx[path]
1990 1995 try:
1991 1996 if not ui.debugflag:
1992 1997 ui.pushbuffer(error=True)
1993 1998 tool, toolpath = filemerge._picktool(repo, ui, path,
1994 1999 fctx.isbinary(),
1995 2000 'l' in fctx.flags(),
1996 2001 changedelete)
1997 2002 finally:
1998 2003 if not ui.debugflag:
1999 2004 ui.popbuffer()
2000 2005 ui.write(('%s = %s\n') % (path, tool))
2001 2006
2002 2007 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2003 2008 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2004 2009 '''access the pushkey key/value protocol
2005 2010
2006 2011 With two args, list the keys in the given namespace.
2007 2012
2008 2013 With five args, set a key to new if it currently is set to old.
2009 2014 Reports success or failure.
2010 2015 '''
2011 2016
2012 2017 target = hg.peer(ui, {}, repopath)
2013 2018 if keyinfo:
2014 2019 key, old, new = keyinfo
2015 2020 with target.commandexecutor() as e:
2016 2021 r = e.callcommand('pushkey', {
2017 2022 'namespace': namespace,
2018 2023 'key': key,
2019 2024 'old': old,
2020 2025 'new': new,
2021 2026 }).result()
2022 2027
2023 2028 ui.status(pycompat.bytestr(r) + '\n')
2024 2029 return not r
2025 2030 else:
2026 2031 for k, v in sorted(target.listkeys(namespace).iteritems()):
2027 2032 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
2028 2033 stringutil.escapestr(v)))
2029 2034
2030 2035 @command('debugpvec', [], _('A B'))
2031 2036 def debugpvec(ui, repo, a, b=None):
2032 2037 ca = scmutil.revsingle(repo, a)
2033 2038 cb = scmutil.revsingle(repo, b)
2034 2039 pa = pvec.ctxpvec(ca)
2035 2040 pb = pvec.ctxpvec(cb)
2036 2041 if pa == pb:
2037 2042 rel = "="
2038 2043 elif pa > pb:
2039 2044 rel = ">"
2040 2045 elif pa < pb:
2041 2046 rel = "<"
2042 2047 elif pa | pb:
2043 2048 rel = "|"
2044 2049 ui.write(_("a: %s\n") % pa)
2045 2050 ui.write(_("b: %s\n") % pb)
2046 2051 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2047 2052 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2048 2053 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2049 2054 pa.distance(pb), rel))
2050 2055
2051 2056 @command('debugrebuilddirstate|debugrebuildstate',
2052 2057 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2053 2058 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2054 2059 'the working copy parent')),
2055 2060 ],
2056 2061 _('[-r REV]'))
2057 2062 def debugrebuilddirstate(ui, repo, rev, **opts):
2058 2063 """rebuild the dirstate as it would look like for the given revision
2059 2064
2060 2065 If no revision is specified, the working directory's first parent will be used.
2061 2066
2062 2067 The dirstate will be set to the files of the given revision.
2063 2068 The actual working directory content or existing dirstate
2064 2069 information such as adds or removes is not considered.
2065 2070
2066 2071 ``minimal`` will only rebuild the dirstate status for files that claim to be
2067 2072 tracked but are not in the parent manifest, or that exist in the parent
2068 2073 manifest but are not in the dirstate. It will not change adds, removes, or
2069 2074 modified files that are in the working copy parent.
2070 2075
2071 2076 One use of this command is to make the next :hg:`status` invocation
2072 2077 check the actual file content.
2073 2078 """
2074 2079 ctx = scmutil.revsingle(repo, rev)
2075 2080 with repo.wlock():
2076 2081 dirstate = repo.dirstate
2077 2082 changedfiles = None
2078 2083 # See command doc for what minimal does.
2079 2084 if opts.get(r'minimal'):
2080 2085 manifestfiles = set(ctx.manifest().keys())
2081 2086 dirstatefiles = set(dirstate)
2082 2087 manifestonly = manifestfiles - dirstatefiles
2083 2088 dsonly = dirstatefiles - manifestfiles
2084 2089 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2085 2090 changedfiles = manifestonly | dsnotadded
2086 2091
2087 2092 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2088 2093
2089 2094 @command('debugrebuildfncache', [], '')
2090 2095 def debugrebuildfncache(ui, repo):
2091 2096 """rebuild the fncache file"""
2092 2097 repair.rebuildfncache(ui, repo)
2093 2098
2094 2099 @command('debugrename',
2095 2100 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2096 2101 _('[-r REV] [FILE]...'))
2097 2102 def debugrename(ui, repo, *pats, **opts):
2098 2103 """dump rename information"""
2099 2104
2100 2105 opts = pycompat.byteskwargs(opts)
2101 2106 ctx = scmutil.revsingle(repo, opts.get('rev'))
2102 2107 m = scmutil.match(ctx, pats, opts)
2103 2108 for abs in ctx.walk(m):
2104 2109 fctx = ctx[abs]
2105 2110 o = fctx.filelog().renamed(fctx.filenode())
2106 2111 rel = repo.pathto(abs)
2107 2112 if o:
2108 2113 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2109 2114 else:
2110 2115 ui.write(_("%s not renamed\n") % rel)
2111 2116
2112 2117 @command('debugrevlog', cmdutil.debugrevlogopts +
2113 2118 [('d', 'dump', False, _('dump index data'))],
2114 2119 _('-c|-m|FILE'),
2115 2120 optionalrepo=True)
2116 2121 def debugrevlog(ui, repo, file_=None, **opts):
2117 2122 """show data and statistics about a revlog"""
2118 2123 opts = pycompat.byteskwargs(opts)
2119 2124 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2120 2125
2121 2126 if opts.get("dump"):
2122 2127 numrevs = len(r)
2123 2128 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2124 2129 " rawsize totalsize compression heads chainlen\n"))
2125 2130 ts = 0
2126 2131 heads = set()
2127 2132
2128 2133 for rev in pycompat.xrange(numrevs):
2129 2134 dbase = r.deltaparent(rev)
2130 2135 if dbase == -1:
2131 2136 dbase = rev
2132 2137 cbase = r.chainbase(rev)
2133 2138 clen = r.chainlen(rev)
2134 2139 p1, p2 = r.parentrevs(rev)
2135 2140 rs = r.rawsize(rev)
2136 2141 ts = ts + rs
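# a revision stops being counted as a head as soon as it appears as a
# parent of a later revision, so only current heads remain in the set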
2137 2142 heads -= set(r.parentrevs(rev))
2138 2143 heads.add(rev)
2139 2144 try:
2140 2145 compression = ts / r.end(rev)
2141 2146 except ZeroDivisionError:
2142 2147 compression = 0
2143 2148 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2144 2149 "%11d %5d %8d\n" %
2145 2150 (rev, p1, p2, r.start(rev), r.end(rev),
2146 2151 r.start(dbase), r.start(cbase),
2147 2152 r.start(p1), r.start(p2),
2148 2153 rs, ts, compression, len(heads), clen))
2149 2154 return 0
2150 2155
2151 2156 v = r.version
2152 2157 format = v & 0xFFFF
2153 2158 flags = []
2154 2159 gdelta = False
2155 2160 if v & revlog.FLAG_INLINE_DATA:
2156 2161 flags.append('inline')
2157 2162 if v & revlog.FLAG_GENERALDELTA:
2158 2163 gdelta = True
2159 2164 flags.append('generaldelta')
2160 2165 if not flags:
2161 2166 flags = ['(none)']
2162 2167
2163 2168 ### tracks merge vs single parent
2164 2169 nummerges = 0
2165 2170
2166 2171 ### tracks the ways the deltas are built
2167 2172 # nodelta
2168 2173 numempty = 0
2169 2174 numemptytext = 0
2170 2175 numemptydelta = 0
2171 2176 # full file content
2172 2177 numfull = 0
2173 2178 # intermediate snapshot against a prior snapshot
2174 2179 numsemi = 0
2175 2180 # snapshot count per depth
2176 2181 numsnapdepth = collections.defaultdict(lambda: 0)
2177 2182 # delta against previous revision
2178 2183 numprev = 0
2179 2184 # delta against first or second parent (not prev)
2180 2185 nump1 = 0
2181 2186 nump2 = 0
2182 2187 # delta against neither prev nor parents
2183 2188 numother = 0
2184 2189 # delta against prev that are also first or second parent
2185 2190 # (details of `numprev`)
2186 2191 nump1prev = 0
2187 2192 nump2prev = 0
2188 2193
2189 2194 # data about delta chain of each revs
2190 2195 chainlengths = []
2191 2196 chainbases = []
2192 2197 chainspans = []
2193 2198
2194 2199 # data about each revision
2195 2200 datasize = [None, 0, 0]
2196 2201 fullsize = [None, 0, 0]
2197 2202 semisize = [None, 0, 0]
2198 2203 # snapshot count per depth
2199 2204 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2200 2205 deltasize = [None, 0, 0]
2201 2206 chunktypecounts = {}
2202 2207 chunktypesizes = {}
2203 2208
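# update a running [min, max, total] triple with one more size sample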
2204 2209 def addsize(size, l):
2205 2210 if l[0] is None or size < l[0]:
2206 2211 l[0] = size
2207 2212 if size > l[1]:
2208 2213 l[1] = size
2209 2214 l[2] += size
2210 2215
2211 2216 numrevs = len(r)
2212 2217 for rev in pycompat.xrange(numrevs):
2213 2218 p1, p2 = r.parentrevs(rev)
2214 2219 delta = r.deltaparent(rev)
2215 2220 if format > 0:
2216 2221 addsize(r.rawsize(rev), datasize)
2217 2222 if p2 != nullrev:
2218 2223 nummerges += 1
2219 2224 size = r.length(rev)
2220 2225 if delta == nullrev:
2221 2226 chainlengths.append(0)
2222 2227 chainbases.append(r.start(rev))
2223 2228 chainspans.append(size)
2224 2229 if size == 0:
2225 2230 numempty += 1
2226 2231 numemptytext += 1
2227 2232 else:
2228 2233 numfull += 1
2229 2234 numsnapdepth[0] += 1
2230 2235 addsize(size, fullsize)
2231 2236 addsize(size, snapsizedepth[0])
2232 2237 else:
2233 2238 chainlengths.append(chainlengths[delta] + 1)
2234 2239 baseaddr = chainbases[delta]
2235 2240 revaddr = r.start(rev)
2236 2241 chainbases.append(baseaddr)
2237 2242 chainspans.append((revaddr - baseaddr) + size)
2238 2243 if size == 0:
2239 2244 numempty += 1
2240 2245 numemptydelta += 1
2241 2246 elif r.issnapshot(rev):
2242 2247 addsize(size, semisize)
2243 2248 numsemi += 1
2244 2249 depth = r.snapshotdepth(rev)
2245 2250 numsnapdepth[depth] += 1
2246 2251 addsize(size, snapsizedepth[depth])
2247 2252 else:
2248 2253 addsize(size, deltasize)
2249 2254 if delta == rev - 1:
2250 2255 numprev += 1
2251 2256 if delta == p1:
2252 2257 nump1prev += 1
2253 2258 elif delta == p2:
2254 2259 nump2prev += 1
2255 2260 elif delta == p1:
2256 2261 nump1 += 1
2257 2262 elif delta == p2:
2258 2263 nump2 += 1
2259 2264 elif delta != nullrev:
2260 2265 numother += 1
2261 2266
2262 2267 # Obtain data on the raw chunks in the revlog.
2263 2268 if util.safehasattr(r, '_getsegmentforrevs'):
2264 2269 segment = r._getsegmentforrevs(rev, rev)[1]
2265 2270 else:
2266 2271 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2267 2272 if segment:
2268 2273 chunktype = bytes(segment[0:1])
2269 2274 else:
2270 2275 chunktype = 'empty'
2271 2276
2272 2277 if chunktype not in chunktypecounts:
2273 2278 chunktypecounts[chunktype] = 0
2274 2279 chunktypesizes[chunktype] = 0
2275 2280
2276 2281 chunktypecounts[chunktype] += 1
2277 2282 chunktypesizes[chunktype] += size
2278 2283
2279 2284 # Adjust size min value for empty cases
2280 2285 for size in (datasize, fullsize, semisize, deltasize):
2281 2286 if size[0] is None:
2282 2287 size[0] = 0
2283 2288
2284 2289 numdeltas = numrevs - numfull - numempty - numsemi
2285 2290 numoprev = numprev - nump1prev - nump2prev
2286 2291 totalrawsize = datasize[2]
2287 2292 datasize[2] /= numrevs
2288 2293 fulltotal = fullsize[2]
2289 2294 if numfull == 0:
2290 2295 fullsize[2] = 0
2291 2296 else:
2292 2297 fullsize[2] /= numfull
2293 2298 semitotal = semisize[2]
2294 2299 snaptotal = {}
2295 2300 if numsemi > 0:
2296 2301 semisize[2] /= numsemi
2297 2302 for depth in snapsizedepth:
2298 2303 snaptotal[depth] = snapsizedepth[depth][2]
2299 2304 snapsizedepth[depth][2] /= numsnapdepth[depth]
2300 2305
2301 2306 deltatotal = deltasize[2]
2302 2307 if numdeltas > 0:
2303 2308 deltasize[2] /= numdeltas
2304 2309 totalsize = fulltotal + semitotal + deltatotal
2305 2310 avgchainlen = sum(chainlengths) / numrevs
2306 2311 maxchainlen = max(chainlengths)
2307 2312 maxchainspan = max(chainspans)
2308 2313 compratio = 1
2309 2314 if totalsize:
2310 2315 compratio = totalrawsize / totalsize
2311 2316
2312 2317 basedfmtstr = '%%%dd\n'
2313 2318 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2314 2319
2315 2320 def dfmtstr(max):
2316 2321 return basedfmtstr % len(str(max))
2317 2322 def pcfmtstr(max, padding=0):
2318 2323 return basepcfmtstr % (len(str(max)), ' ' * padding)
2319 2324
2320 2325 def pcfmt(value, total):
2321 2326 if total:
2322 2327 return (value, 100 * float(value) / total)
2323 2328 else:
2324 2329 return value, 100.0
2325 2330
2326 2331 ui.write(('format : %d\n') % format)
2327 2332 ui.write(('flags : %s\n') % ', '.join(flags))
2328 2333
2329 2334 ui.write('\n')
2330 2335 fmt = pcfmtstr(totalsize)
2331 2336 fmt2 = dfmtstr(totalsize)
2332 2337 ui.write(('revisions : ') + fmt2 % numrevs)
2333 2338 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2334 2339 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2335 2340 ui.write(('revisions : ') + fmt2 % numrevs)
2336 2341 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2337 2342 ui.write((' text : ')
2338 2343 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2339 2344 ui.write((' delta : ')
2340 2345 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2341 2346 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2342 2347 for depth in sorted(numsnapdepth):
2343 2348 ui.write((' lvl-%-3d : ' % depth)
2344 2349 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2345 2350 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2346 2351 ui.write(('revision size : ') + fmt2 % totalsize)
2347 2352 ui.write((' snapshot : ')
2348 2353 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2349 2354 for depth in sorted(numsnapdepth):
2350 2355 ui.write((' lvl-%-3d : ' % depth)
2351 2356 + fmt % pcfmt(snaptotal[depth], totalsize))
2352 2357 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2353 2358
2354 2359 def fmtchunktype(chunktype):
2355 2360 if chunktype == 'empty':
2356 2361 return ' %s : ' % chunktype
2357 2362 elif chunktype in pycompat.bytestr(string.ascii_letters):
2358 2363 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2359 2364 else:
2360 2365 return ' 0x%s : ' % hex(chunktype)
2361 2366
2362 2367 ui.write('\n')
2363 2368 ui.write(('chunks : ') + fmt2 % numrevs)
2364 2369 for chunktype in sorted(chunktypecounts):
2365 2370 ui.write(fmtchunktype(chunktype))
2366 2371 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2367 2372 ui.write(('chunks size : ') + fmt2 % totalsize)
2368 2373 for chunktype in sorted(chunktypecounts):
2369 2374 ui.write(fmtchunktype(chunktype))
2370 2375 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2371 2376
2372 2377 ui.write('\n')
2373 2378 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2374 2379 ui.write(('avg chain length : ') + fmt % avgchainlen)
2375 2380 ui.write(('max chain length : ') + fmt % maxchainlen)
2376 2381 ui.write(('max chain reach : ') + fmt % maxchainspan)
2377 2382 ui.write(('compression ratio : ') + fmt % compratio)
2378 2383
2379 2384 if format > 0:
2380 2385 ui.write('\n')
2381 2386 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2382 2387 % tuple(datasize))
2383 2388 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2384 2389 % tuple(fullsize))
2385 2390 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2386 2391 % tuple(semisize))
2387 2392 for depth in sorted(snapsizedepth):
2388 2393 if depth == 0:
2389 2394 continue
2390 2395 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2391 2396 % ((depth,) + tuple(snapsizedepth[depth])))
2392 2397 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2393 2398 % tuple(deltasize))
2394 2399
2395 2400 if numdeltas > 0:
2396 2401 ui.write('\n')
2397 2402 fmt = pcfmtstr(numdeltas)
2398 2403 fmt2 = pcfmtstr(numdeltas, 4)
2399 2404 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2400 2405 if numprev > 0:
2401 2406 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2402 2407 numprev))
2403 2408 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2404 2409 numprev))
2405 2410 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2406 2411 numprev))
2407 2412 if gdelta:
2408 2413 ui.write(('deltas against p1 : ')
2409 2414 + fmt % pcfmt(nump1, numdeltas))
2410 2415 ui.write(('deltas against p2 : ')
2411 2416 + fmt % pcfmt(nump2, numdeltas))
2412 2417 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2413 2418 numdeltas))
2414 2419
2415 2420 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2416 2421 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2417 2422 _('[-f FORMAT] -c|-m|FILE'),
2418 2423 optionalrepo=True)
2419 2424 def debugrevlogindex(ui, repo, file_=None, **opts):
2420 2425 """dump the contents of a revlog index"""
2421 2426 opts = pycompat.byteskwargs(opts)
2422 2427 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2423 2428 format = opts.get('format', 0)
2424 2429 if format not in (0, 1):
2425 2430 raise error.Abort(_("unknown format %d") % format)
2426 2431
2427 2432 if ui.debugflag:
2428 2433 shortfn = hex
2429 2434 else:
2430 2435 shortfn = short
2431 2436
2432 2437 # There might not be anything in r, so have a sane default
2433 2438 idlen = 12
2434 2439 for i in r:
2435 2440 idlen = len(shortfn(r.node(i)))
2436 2441 break
2437 2442
2438 2443 if format == 0:
2439 2444 if ui.verbose:
2440 2445 ui.write((" rev offset length linkrev"
2441 2446 " %s %s p2\n") % ("nodeid".ljust(idlen),
2442 2447 "p1".ljust(idlen)))
2443 2448 else:
2444 2449 ui.write((" rev linkrev %s %s p2\n") % (
2445 2450 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2446 2451 elif format == 1:
2447 2452 if ui.verbose:
2448 2453 ui.write((" rev flag offset length size link p1"
2449 2454 " p2 %s\n") % "nodeid".rjust(idlen))
2450 2455 else:
2451 2456 ui.write((" rev flag size link p1 p2 %s\n") %
2452 2457 "nodeid".rjust(idlen))
2453 2458
2454 2459 for i in r:
2455 2460 node = r.node(i)
2456 2461 if format == 0:
2457 2462 try:
2458 2463 pp = r.parents(node)
2459 2464 except Exception:
2460 2465 pp = [nullid, nullid]
2461 2466 if ui.verbose:
2462 2467 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2463 2468 i, r.start(i), r.length(i), r.linkrev(i),
2464 2469 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2465 2470 else:
2466 2471 ui.write("% 6d % 7d %s %s %s\n" % (
2467 2472 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2468 2473 shortfn(pp[1])))
2469 2474 elif format == 1:
2470 2475 pr = r.parentrevs(i)
2471 2476 if ui.verbose:
2472 2477 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2473 2478 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2474 2479 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2475 2480 else:
2476 2481 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2477 2482 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2478 2483 shortfn(node)))
2479 2484
2480 2485 @command('debugrevspec',
2481 2486 [('', 'optimize', None,
2482 2487 _('print parsed tree after optimizing (DEPRECATED)')),
2483 2488 ('', 'show-revs', True, _('print list of result revisions (default)')),
2484 2489 ('s', 'show-set', None, _('print internal representation of result set')),
2485 2490 ('p', 'show-stage', [],
2486 2491 _('print parsed tree at the given stage'), _('NAME')),
2487 2492 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2488 2493 ('', 'verify-optimized', False, _('verify optimized result')),
2489 2494 ],
2490 2495 ('REVSPEC'))
2491 2496 def debugrevspec(ui, repo, expr, **opts):
2492 2497 """parse and apply a revision specification
2493 2498
2494 2499 Use the -p/--show-stage option to print the parsed tree at the given stages.
2495 2500 Use -p all to print the tree at every stage.
2496 2501
2497 2502 Use the --no-show-revs option with -s or -p to print only the set
2498 2503 representation or the parsed tree, respectively.
2499 2504
2500 2505 Use --verify-optimized to compare the optimized result with the unoptimized
2501 2506 one. Returns 1 if the optimized result differs.
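
For example, an invocation that prints both the parsed and the optimized
trees for a revset could look like::

  hg debugrevspec -p parsed -p optimized 'heads(all())'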
2502 2507 """
2503 2508 opts = pycompat.byteskwargs(opts)
2504 2509 aliases = ui.configitems('revsetalias')
2505 2510 stages = [
2506 2511 ('parsed', lambda tree: tree),
2507 2512 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2508 2513 ui.warn)),
2509 2514 ('concatenated', revsetlang.foldconcat),
2510 2515 ('analyzed', revsetlang.analyze),
2511 2516 ('optimized', revsetlang.optimize),
2512 2517 ]
2513 2518 if opts['no_optimized']:
2514 2519 stages = stages[:-1]
2515 2520 if opts['verify_optimized'] and opts['no_optimized']:
2516 2521 raise error.Abort(_('cannot use --verify-optimized with '
2517 2522 '--no-optimized'))
2518 2523 stagenames = set(n for n, f in stages)
2519 2524
2520 2525 showalways = set()
2521 2526 showchanged = set()
2522 2527 if ui.verbose and not opts['show_stage']:
2523 2528 # show parsed tree by --verbose (deprecated)
2524 2529 showalways.add('parsed')
2525 2530 showchanged.update(['expanded', 'concatenated'])
2526 2531 if opts['optimize']:
2527 2532 showalways.add('optimized')
2528 2533 if opts['show_stage'] and opts['optimize']:
2529 2534 raise error.Abort(_('cannot use --optimize with --show-stage'))
2530 2535 if opts['show_stage'] == ['all']:
2531 2536 showalways.update(stagenames)
2532 2537 else:
2533 2538 for n in opts['show_stage']:
2534 2539 if n not in stagenames:
2535 2540 raise error.Abort(_('invalid stage name: %s') % n)
2536 2541 showalways.update(opts['show_stage'])
2537 2542
2538 2543 treebystage = {}
2539 2544 printedtree = None
2540 2545 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2541 2546 for n, f in stages:
2542 2547 treebystage[n] = tree = f(tree)
2543 2548 if n in showalways or (n in showchanged and tree != printedtree):
2544 2549 if opts['show_stage'] or n != 'parsed':
2545 2550 ui.write(("* %s:\n") % n)
2546 2551 ui.write(revsetlang.prettyformat(tree), "\n")
2547 2552 printedtree = tree
2548 2553
2549 2554 if opts['verify_optimized']:
2550 2555 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2551 2556 brevs = revset.makematcher(treebystage['optimized'])(repo)
2552 2557 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2553 2558 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2554 2559 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2555 2560 arevs = list(arevs)
2556 2561 brevs = list(brevs)
2557 2562 if arevs == brevs:
2558 2563 return 0
2559 2564 ui.write(('--- analyzed\n'), label='diff.file_a')
2560 2565 ui.write(('+++ optimized\n'), label='diff.file_b')
2561 2566 sm = difflib.SequenceMatcher(None, arevs, brevs)
2562 2567 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2563 2568 if tag in (r'delete', r'replace'):
2564 2569 for c in arevs[alo:ahi]:
2565 2570 ui.write('-%d\n' % c, label='diff.deleted')
2566 2571 if tag in (r'insert', r'replace'):
2567 2572 for c in brevs[blo:bhi]:
2568 2573 ui.write('+%d\n' % c, label='diff.inserted')
2569 2574 if tag == r'equal':
2570 2575 for c in arevs[alo:ahi]:
2571 2576 ui.write(' %d\n' % c)
2572 2577 return 1
2573 2578
2574 2579 func = revset.makematcher(tree)
2575 2580 revs = func(repo)
2576 2581 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2577 2582 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2578 2583 if not opts['show_revs']:
2579 2584 return
2580 2585 for c in revs:
2581 2586 ui.write("%d\n" % c)
2582 2587
2583 2588 @command('debugserve', [
2584 2589 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2585 2590 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2586 2591 ('', 'logiofile', '', _('file to log server I/O to')),
2587 2592 ], '')
2588 2593 def debugserve(ui, repo, **opts):
2589 2594 """run a server with advanced settings
2590 2595
2591 2596 This command is similar to :hg:`serve`. It exists partially as a
2592 2597 workaround to the fact that ``hg serve --stdio`` must have specific
2593 2598 arguments for security reasons.
2594 2599 """
2595 2600 opts = pycompat.byteskwargs(opts)
2596 2601
2597 2602 if not opts['sshstdio']:
2598 2603 raise error.Abort(_('only --sshstdio is currently supported'))
2599 2604
2600 2605 logfh = None
2601 2606
2602 2607 if opts['logiofd'] and opts['logiofile']:
2603 2608 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2604 2609
2605 2610 if opts['logiofd']:
2606 2611 # Line buffered because output is line based.
2607 2612 try:
2608 2613 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2609 2614 except OSError as e:
2610 2615 if e.errno != errno.ESPIPE:
2611 2616 raise
2612 2617 # can't seek a pipe, so `ab` mode fails on py3
2613 2618 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2614 2619 elif opts['logiofile']:
2615 2620 logfh = open(opts['logiofile'], 'ab', 1)
2616 2621
2617 2622 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2618 2623 s.serve_forever()
2619 2624
2620 2625 @command('debugsetparents', [], _('REV1 [REV2]'))
2621 2626 def debugsetparents(ui, repo, rev1, rev2=None):
2622 2627 """manually set the parents of the current working directory
2623 2628
2624 2629 This is useful for writing repository conversion tools, but should
2625 2630 be used with care. For example, neither the working directory nor the
2626 2631 dirstate is updated, so file status may be incorrect after running this
2627 2632 command.
2628 2633
2629 2634 Returns 0 on success.
2630 2635 """
2631 2636
2632 2637 node1 = scmutil.revsingle(repo, rev1).node()
2633 2638 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2634 2639
2635 2640 with repo.wlock():
2636 2641 repo.setparents(node1, node2)
2637 2642
2638 2643 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2639 2644 def debugssl(ui, repo, source=None, **opts):
2640 2645 '''test a secure connection to a server
2641 2646
2642 2647 This builds the certificate chain for the server on Windows, installing the
2643 2648 missing intermediates and trusted root via Windows Update if necessary. It
2644 2649 does nothing on other platforms.
2645 2650
2646 2651 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2647 2652 that server is used. See :hg:`help urls` for more information.
2648 2653
2649 2654 If the update succeeds, retry the original operation. Otherwise, the cause
2650 2655 of the SSL error is likely another issue.
2651 2656 '''
2652 2657 if not pycompat.iswindows:
2653 2658 raise error.Abort(_('certificate chain building is only possible on '
2654 2659 'Windows'))
2655 2660
2656 2661 if not source:
2657 2662 if not repo:
2658 2663 raise error.Abort(_("there is no Mercurial repository here, and no "
2659 2664 "server specified"))
2660 2665 source = "default"
2661 2666
2662 2667 source, branches = hg.parseurl(ui.expandpath(source))
2663 2668 url = util.url(source)
2664 2669
2665 2670 defaultport = {'https': 443, 'ssh': 22}
2666 2671 if url.scheme in defaultport:
2667 2672 try:
2668 2673 addr = (url.host, int(url.port or defaultport[url.scheme]))
2669 2674 except ValueError:
2670 2675 raise error.Abort(_("malformed port number in URL"))
2671 2676 else:
2672 2677 raise error.Abort(_("only https and ssh connections are supported"))
2673 2678
2674 2679 from . import win32
2675 2680
2676 2681 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2677 2682 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2678 2683
2679 2684 try:
2680 2685 s.connect(addr)
2681 2686 cert = s.getpeercert(True)
2682 2687
2683 2688 ui.status(_('checking the certificate chain for %s\n') % url.host)
2684 2689
2685 2690 complete = win32.checkcertificatechain(cert, build=False)
2686 2691
2687 2692 if not complete:
2688 2693 ui.status(_('certificate chain is incomplete, updating... '))
2689 2694
2690 2695 if not win32.checkcertificatechain(cert):
2691 2696 ui.status(_('failed.\n'))
2692 2697 else:
2693 2698 ui.status(_('done.\n'))
2694 2699 else:
2695 2700 ui.status(_('full certificate chain is available\n'))
2696 2701 finally:
2697 2702 s.close()
2698 2703
2699 2704 @command('debugsub',
2700 2705 [('r', 'rev', '',
2701 2706 _('revision to check'), _('REV'))],
2702 2707 _('[-r REV] [REV]'))
2703 2708 def debugsub(ui, repo, rev=None):
2704 2709 ctx = scmutil.revsingle(repo, rev, None)
2705 2710 for k, v in sorted(ctx.substate.items()):
2706 2711 ui.write(('path %s\n') % k)
2707 2712 ui.write((' source %s\n') % v[0])
2708 2713 ui.write((' revision %s\n') % v[1])
2709 2714
2710 2715 @command('debugsuccessorssets',
2711 2716 [('', 'closest', False, _('return closest successors sets only'))],
2712 2717 _('[REV]'))
2713 2718 def debugsuccessorssets(ui, repo, *revs, **opts):
2714 2719 """show set of successors for revision
2715 2720
2716 2721 A successors set of changeset A is a consistent group of revisions that
2717 2722 succeed A. It contains only non-obsolete changesets unless the
2718 2723 ``--closest`` option is given.
2719 2724
2720 2725 In most cases a changeset A has a single successors set containing a single
2721 2726 successor (changeset A replaced by A').
2722 2727
2723 2728 A changeset that is made obsolete with no successors is called "pruned".
2724 2729 Such changesets have no successors sets at all.
2725 2730
2726 2731 A changeset that has been "split" will have a successors set containing
2727 2732 more than one successor.
2728 2733
2729 2734 A changeset that has been rewritten in multiple different ways is called
2730 2735 "divergent". Such changesets have multiple successor sets (each of which
2731 2736 may also be split, i.e. have multiple successors).
2732 2737
2733 2738 Results are displayed as follows::
2734 2739
2735 2740 <rev1>
2736 2741 <successors-1A>
2737 2742 <rev2>
2738 2743 <successors-2A>
2739 2744 <successors-2B1> <successors-2B2> <successors-2B3>
2740 2745
2741 2746 Here rev2 has two possible (i.e. divergent) successors sets. The first
2742 2747 holds one element, whereas the second holds three (i.e. the changeset has
2743 2748 been split).
2744 2749 """
2745 2750 # passed to successorssets caching computation from one call to another
2746 2751 cache = {}
2747 2752 ctx2str = bytes
2748 2753 node2str = short
2749 2754 for rev in scmutil.revrange(repo, revs):
2750 2755 ctx = repo[rev]
2751 2756 ui.write('%s\n'% ctx2str(ctx))
2752 2757 for succsset in obsutil.successorssets(repo, ctx.node(),
2753 2758 closest=opts[r'closest'],
2754 2759 cache=cache):
2755 2760 if succsset:
2756 2761 ui.write(' ')
2757 2762 ui.write(node2str(succsset[0]))
2758 2763 for node in succsset[1:]:
2759 2764 ui.write(' ')
2760 2765 ui.write(node2str(node))
2761 2766 ui.write('\n')
2762 2767
2763 2768 @command('debugtemplate',
2764 2769 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2765 2770 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2766 2771 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2767 2772 optionalrepo=True)
2768 2773 def debugtemplate(ui, repo, tmpl, **opts):
2769 2774 """parse and apply a template
2770 2775
2771 2776 If -r/--rev is given, the template is processed as a log template and
2772 2777 applied to the given changesets. Otherwise, it is processed as a generic
2773 2778 template.
2774 2779
2775 2780 Use --verbose to print the parsed tree.
2776 2781 """
2777 2782 revs = None
2778 2783 if opts[r'rev']:
2779 2784 if repo is None:
2780 2785 raise error.RepoError(_('there is no Mercurial repository here '
2781 2786 '(.hg not found)'))
2782 2787 revs = scmutil.revrange(repo, opts[r'rev'])
2783 2788
2784 2789 props = {}
2785 2790 for d in opts[r'define']:
2786 2791 try:
2787 2792 k, v = (e.strip() for e in d.split('=', 1))
2788 2793 if not k or k == 'ui':
2789 2794 raise ValueError
2790 2795 props[k] = v
2791 2796 except ValueError:
2792 2797 raise error.Abort(_('malformed keyword definition: %s') % d)
2793 2798
2794 2799 if ui.verbose:
2795 2800 aliases = ui.configitems('templatealias')
2796 2801 tree = templater.parse(tmpl)
2797 2802 ui.note(templater.prettyformat(tree), '\n')
2798 2803 newtree = templater.expandaliases(tree, aliases)
2799 2804 if newtree != tree:
2800 2805 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2801 2806
2802 2807 if revs is None:
2803 2808 tres = formatter.templateresources(ui, repo)
2804 2809 t = formatter.maketemplater(ui, tmpl, resources=tres)
2805 2810 if ui.verbose:
2806 2811 kwds, funcs = t.symbolsuseddefault()
2807 2812 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2808 2813 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2809 2814 ui.write(t.renderdefault(props))
2810 2815 else:
2811 2816 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2812 2817 if ui.verbose:
2813 2818 kwds, funcs = displayer.t.symbolsuseddefault()
2814 2819 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2815 2820 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2816 2821 for r in revs:
2817 2822 displayer.show(repo[r], **pycompat.strkwargs(props))
2818 2823 displayer.close()
2819 2824
2820 2825 @command('debuguigetpass', [
2821 2826 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2822 2827 ], _('[-p TEXT]'), norepo=True)
2823 2828 def debuguigetpass(ui, prompt=''):
2824 2829 """show prompt to type password"""
2825 2830 r = ui.getpass(prompt)
2826 2831 ui.write(('response: %s\n') % r)
2827 2832
2828 2833 @command('debuguiprompt', [
2829 2834 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2830 2835 ], _('[-p TEXT]'), norepo=True)
2831 2836 def debuguiprompt(ui, prompt=''):
2832 2837 """show plain prompt"""
2833 2838 r = ui.prompt(prompt)
2834 2839 ui.write(('response: %s\n') % r)
2835 2840
2836 2841 @command('debugupdatecaches', [])
2837 2842 def debugupdatecaches(ui, repo, *pats, **opts):
2838 2843 """warm all known caches in the repository"""
2839 2844 with repo.wlock(), repo.lock():
2840 2845 repo.updatecaches(full=True)
2841 2846
2842 2847 @command('debugupgraderepo', [
2843 2848 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2844 2849 ('', 'run', False, _('performs an upgrade')),
2845 2850 ('', 'backup', True, _('keep the old repository content around')),
2846 2851 ])
2847 2852 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2848 2853 """upgrade a repository to use different features
2849 2854
2850 2855 If no arguments are specified, the repository is evaluated for upgrade
2851 2856 and a list of problems and potential optimizations is printed.
2852 2857
2853 2858 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2854 2859 can be influenced via additional arguments. More details will be provided
2855 2860 by the command output when run without ``--run``.
2856 2861
2857 2862 During the upgrade, the repository will be locked and no writes will be
2858 2863 allowed.
2859 2864
2860 2865 At the end of the upgrade, the repository may not be readable while new
2861 2866 repository data is swapped in. This window will be as long as it takes to
2862 2867 rename some directories inside the ``.hg`` directory. On most machines, this
2863 2868 should complete almost instantaneously and the chances of a consumer being
2864 2869 unable to access the repository should be low.
2865 2870 """
2866 2871 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2867 2872 backup=backup)
2868 2873
2869 2874 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2870 2875 inferrepo=True)
2871 2876 def debugwalk(ui, repo, *pats, **opts):
2872 2877 """show how files match on given patterns"""
2873 2878 opts = pycompat.byteskwargs(opts)
2874 2879 m = scmutil.match(repo[None], pats, opts)
2875 2880 if ui.verbose:
2876 2881 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2877 2882 items = list(repo[None].walk(m))
2878 2883 if not items:
2879 2884 return
2880 2885 f = lambda fn: fn
2881 2886 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2882 2887 f = lambda fn: util.normpath(fn)
2883 2888 fmt = 'f %%-%ds %%-%ds %%s' % (
2884 2889 max([len(abs) for abs in items]),
2885 2890 max([len(repo.pathto(abs)) for abs in items]))
2886 2891 for abs in items:
2887 2892 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2888 2893 ui.write("%s\n" % line.rstrip())
2889 2894
2890 2895 @command('debugwhyunstable', [], _('REV'))
2891 2896 def debugwhyunstable(ui, repo, rev):
2892 2897 """explain instabilities of a changeset"""
2893 2898 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2894 2899 dnodes = ''
2895 2900 if entry.get('divergentnodes'):
2896 2901 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2897 2902 for ctx in entry['divergentnodes']) + ' '
2898 2903 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2899 2904 entry['reason'], entry['node']))
2900 2905
2901 2906 @command('debugwireargs',
2902 2907 [('', 'three', '', 'three'),
2903 2908 ('', 'four', '', 'four'),
2904 2909 ('', 'five', '', 'five'),
2905 2910 ] + cmdutil.remoteopts,
2906 2911 _('REPO [OPTIONS]... [ONE [TWO]]'),
2907 2912 norepo=True)
2908 2913 def debugwireargs(ui, repopath, *vals, **opts):
2909 2914 opts = pycompat.byteskwargs(opts)
2910 2915 repo = hg.peer(ui, opts, repopath)
2911 2916 for opt in cmdutil.remoteopts:
2912 2917 del opts[opt[1]]
2913 2918 args = {}
2914 2919 for k, v in opts.iteritems():
2915 2920 if v:
2916 2921 args[k] = v
2917 2922 args = pycompat.strkwargs(args)
2918 2923 # run twice to check that we don't mess up the stream for the next command
2919 2924 res1 = repo.debugwireargs(*vals, **args)
2920 2925 res2 = repo.debugwireargs(*vals, **args)
2921 2926 ui.write("%s\n" % res1)
2922 2927 if res1 != res2:
2923 2928 ui.warn("%s\n" % res2)
2924 2929
2925 2930 def _parsewirelangblocks(fh):
2926 2931 activeaction = None
2927 2932 blocklines = []
2928 2933 lastindent = 0
2929 2934
2930 2935 for line in fh:
2931 2936 line = line.rstrip()
2932 2937 if not line:
2933 2938 continue
2934 2939
2935 2940 if line.startswith(b'#'):
2936 2941 continue
2937 2942
2938 2943 if not line.startswith(b' '):
2939 2944 # New block. Flush previous one.
2940 2945 if activeaction:
2941 2946 yield activeaction, blocklines
2942 2947
2943 2948 activeaction = line
2944 2949 blocklines = []
2945 2950 lastindent = 0
2946 2951 continue
2947 2952
2948 2953 # Else we start with an indent.
2949 2954
2950 2955 if not activeaction:
2951 2956 raise error.Abort(_('indented line outside of block'))
2952 2957
2953 2958 indent = len(line) - len(line.lstrip())
2954 2959
2955 2960 # If this line is indented more than the last line, concatenate it.
2956 2961 if indent > lastindent and blocklines:
2957 2962 blocklines[-1] += line.lstrip()
2958 2963 else:
2959 2964 blocklines.append(line)
2960 2965 lastindent = indent
2961 2966
2962 2967 # Flush last block.
2963 2968 if activeaction:
2964 2969 yield activeaction, blocklines
2965 2970
2966 2971 @command('debugwireproto',
2967 2972 [
2968 2973 ('', 'localssh', False, _('start an SSH server for this repo')),
2969 2974 ('', 'peer', '', _('construct a specific version of the peer')),
2970 2975 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2971 2976 ('', 'nologhandshake', False,
2972 2977 _('do not log I/O related to the peer handshake')),
2973 2978 ] + cmdutil.remoteopts,
2974 2979 _('[PATH]'),
2975 2980 optionalrepo=True)
2976 2981 def debugwireproto(ui, repo, path=None, **opts):
2977 2982 """send wire protocol commands to a server
2978 2983
2979 2984 This command can be used to issue wire protocol commands to remote
2980 2985 peers and to debug the raw data being exchanged.
2981 2986
2982 2987 ``--localssh`` will start an SSH server against the current repository
2983 2988 and connect to that. By default, the connection will perform a handshake
2984 2989 and establish an appropriate peer instance.
2985 2990
2986 2991 ``--peer`` can be used to bypass the handshake protocol and construct a
2987 2992 peer instance using the specified class type. Valid values are ``raw``,
2988 2993 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2989 2994 raw data payloads and don't support higher-level command actions.
2990 2995
2991 2996 ``--noreadstderr`` can be used to disable automatic reading from stderr
2992 2997 of the peer (for SSH connections only). Disabling automatic reading of
2993 2998 stderr is useful for making output more deterministic.
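A typical invocation reads the action script from standard input, for
example (the script file name here is only illustrative)::

    $ hg debugwireproto --localssh < commands.txt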
2994 2999
2995 3000 Commands are issued via a mini language which is specified via stdin.
2996 3001 The language consists of individual actions to perform. An action is
2997 3002 defined by a block. A block is defined as a line with no leading
2998 3003 space followed by 0 or more lines with leading space. Blocks are
2999 3004 effectively a high-level command with additional metadata.
3000 3005
3001 3006 Lines beginning with ``#`` are ignored.
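For instance, a small script could consist of a comment followed by two
blocks, one without and one with argument lines (the command names here
are only illustrative)::

    # comments like this are skipped
    command heads

    command listkeys
        namespace bookmarks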
3002 3007
3003 3008 The following sections denote available actions.
3004 3009
3005 3010 raw
3006 3011 ---
3007 3012
3008 3013 Send raw data to the server.
3009 3014
3010 3015 The block payload contains the raw data to send as one atomic send
3011 3016 operation. The data may not actually be delivered in a single system
3012 3017 call: it depends on the abilities of the transport being used.
3013 3018
3014 3019 Each line in the block is de-indented and concatenated. Then, that
3015 3020 value is evaluated as a Python b'' literal. This allows the use of
3016 3021 backslash escaping, etc.
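For example, the following block sends the bytes ``hello\n`` to the peer
(the payload is only illustrative)::

    raw
        hello\n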
3017 3022
3018 3023 raw+
3019 3024 ----
3020 3025
3021 3026 Behaves like ``raw`` except flushes output afterwards.
3022 3027
3023 3028 command <X>
3024 3029 -----------
3025 3030
3026 3031 Send a request to run a named command, whose name follows the ``command``
3027 3032 string.
3028 3033
3029 3034 Arguments to the command are defined as lines in this block. The format of
3030 3035 each line is ``<key> <value>``. e.g.::
3031 3036
3032 3037 command listkeys
3033 3038 namespace bookmarks
3034 3039
3035 3040 If the value begins with ``eval:``, it will be interpreted as a Python
3036 3041 literal expression. Otherwise values are interpreted as Python b'' literals.
3037 3042 This allows sending complex types and encoding special byte sequences via
3038 3043 backslash escaping.
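For instance, an empty list could be passed as an argument value like this
(the command and argument names are only illustrative)::

    command known
        nodes eval:[]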
3039 3044
3040 3045 The following arguments have special meaning:
3041 3046
3042 3047 ``PUSHFILE``
3043 3048 When defined, the *push* mechanism of the peer will be used instead
3044 3049 of the static request-response mechanism and the content of the
3045 3050 file specified in the value of this argument will be sent as the
3046 3051 command payload.
3047 3052
3048 3053 This can be used to submit a local bundle file to the remote.
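For example, a block along the following lines could stream a bundle file
as the payload of an ``unbundle`` command (the path is illustrative, and
any additional arguments the command expects must still be provided)::

    command unbundle
        PUSHFILE ../initial.v1.hg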
3049 3054
3050 3055 batchbegin
3051 3056 ----------
3052 3057
3053 3058 Instruct the peer to begin a batched send.
3054 3059
3055 3060 All ``command`` blocks are queued for execution until the next
3056 3061 ``batchsubmit`` block.
3057 3062
3058 3063 batchsubmit
3059 3064 -----------
3060 3065
3061 3066 Submit previously queued ``command`` blocks as a batch request.
3062 3067
3063 3068 This action MUST be paired with a ``batchbegin`` action.
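For example, two commands could be batched like this (the command names
are only illustrative)::

    batchbegin

    command heads

    command listkeys
        namespace phases

    batchsubmit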
3064 3069
3065 3070 httprequest <method> <path>
3066 3071 ---------------------------
3067 3072
3068 3073 (HTTP peer only)
3069 3074
3070 3075 Send an HTTP request to the peer.
3071 3076
3072 3077 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3073 3078
3074 3079 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3075 3080 headers to add to the request. e.g. ``Accept: foo``.
3076 3081
3077 3082 The following arguments are special:
3078 3083
3079 3084 ``BODYFILE``
3080 3085 The content of the file defined as the value to this argument will be
3081 3086 transferred verbatim as the HTTP request body.
3082 3087
3083 3088 ``frame <type> <flags> <payload>``
3084 3089 Send a unified protocol frame as part of the request body.
3085 3090
3086 3091 All frames will be collected and sent as the body to the HTTP
3087 3092 request.
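For example, a simple GET request with a custom header might be expressed
as (the path and header values are only illustrative)::

    httprequest GET ?cmd=capabilities
        user-agent: test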
3088 3093
3089 3094 close
3090 3095 -----
3091 3096
3092 3097 Close the connection to the server.
3093 3098
3094 3099 flush
3095 3100 -----
3096 3101
3097 3102 Flush data written to the server.
3098 3103
3099 3104 readavailable
3100 3105 -------------
3101 3106
3102 3107 Close the write end of the connection and read all available data from
3103 3108 the server.
3104 3109
3105 3110 If the connection to the server encompasses multiple pipes, we poll both
3106 3111 pipes and read available data.
3107 3112
3108 3113 readline
3109 3114 --------
3110 3115
3111 3116 Read a line of output from the server. If there are multiple output
3112 3117 pipes, reads only the main pipe.
3113 3118
3114 3119 ereadline
3115 3120 ---------
3116 3121
3117 3122 Like ``readline``, but read from the stderr pipe, if available.
3118 3123
3119 3124 read <X>
3120 3125 --------
3121 3126
3122 3127 ``read()`` N bytes from the server's main output pipe.
3123 3128
3124 3129 eread <X>
3125 3130 ---------
3126 3131
3127 3132 ``read()`` N bytes from the server's stderr pipe, if available.
3128 3133
3129 3134 Specifying Unified Frame-Based Protocol Frames
3130 3135 ----------------------------------------------
3131 3136
3132 3137 It is possible to emit *Unified Frame-Based Protocol* frames by using a
3133 3138 special syntax.
3134 3139
3135 3140 A frame is composed of a type, flags, and a payload. These can be parsed
3136 3141 from a string of the form::
3137 3142
3138 3143 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3139 3144
3140 3145 ``request-id`` and ``stream-id`` are integers defining the request and
3141 3146 stream identifiers.
3142 3147
3143 3148 ``type`` can be an integer value for the frame type or the string name
3144 3149 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3145 3150 ``command-name``.
3146 3151
3147 3152 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3148 3153 components. Each component (and there can be just one) can be an integer
3149 3154 or a flag name for stream flags or frame flags, respectively. Values are
3150 3155 resolved to integers and then bitwise OR'd together.
3151 3156
3152 3157 ``payload`` represents the raw frame payload. If it begins with
3153 3158 ``cbor:``, the following string is evaluated as Python code and the
3154 3159 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3155 3160 as a Python byte string literal.
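For example, the following human-friendly string describes a single frame
for request 1 on stream 1 carrying a CBOR-encoded command request (the
values are only illustrative)::

    1 1 stream-begin command-request new cbor:{b'name': b'heads'}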
3156 3161 """
3157 3162 opts = pycompat.byteskwargs(opts)
3158 3163
3159 3164 if opts['localssh'] and not repo:
3160 3165 raise error.Abort(_('--localssh requires a repository'))
3161 3166
3162 3167 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3163 3168 raise error.Abort(_('invalid value for --peer'),
3164 3169 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3165 3170
3166 3171 if path and opts['localssh']:
3167 3172 raise error.Abort(_('cannot specify --localssh with an explicit '
3168 3173 'path'))
3169 3174
3170 3175 if ui.interactive():
3171 3176 ui.write(_('(waiting for commands on stdin)\n'))
3172 3177
3173 3178 blocks = list(_parsewirelangblocks(ui.fin))
3174 3179
3175 3180 proc = None
3176 3181 stdin = None
3177 3182 stdout = None
3178 3183 stderr = None
3179 3184 opener = None
3180 3185
3181 3186 if opts['localssh']:
3182 3187 # We start the SSH server in its own process so there is process
3183 3188 # separation. This prevents a whole class of potential bugs around
3184 3189 # shared state from interfering with server operation.
3185 3190 args = procutil.hgcmd() + [
3186 3191 '-R', repo.root,
3187 3192 'debugserve', '--sshstdio',
3188 3193 ]
3189 3194 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3190 3195 stdin=subprocess.PIPE,
3191 3196 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3192 3197 bufsize=0)
3193 3198
3194 3199 stdin = proc.stdin
3195 3200 stdout = proc.stdout
3196 3201 stderr = proc.stderr
3197 3202
3198 3203 # We turn the pipes into observers so we can log I/O.
3199 3204 if ui.verbose or opts['peer'] == 'raw':
3200 3205 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3201 3206 logdata=True)
3202 3207 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3203 3208 logdata=True)
3204 3209 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3205 3210 logdata=True)
3206 3211
3207 3212 # --localssh also implies the peer connection settings.
3208 3213
3209 3214 url = 'ssh://localserver'
3210 3215 autoreadstderr = not opts['noreadstderr']
3211 3216
3212 3217 if opts['peer'] == 'ssh1':
3213 3218 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3214 3219 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3215 3220 None, autoreadstderr=autoreadstderr)
3216 3221 elif opts['peer'] == 'ssh2':
3217 3222 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3218 3223 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3219 3224 None, autoreadstderr=autoreadstderr)
3220 3225 elif opts['peer'] == 'raw':
3221 3226 ui.write(_('using raw connection to peer\n'))
3222 3227 peer = None
3223 3228 else:
3224 3229 ui.write(_('creating ssh peer from handshake results\n'))
3225 3230 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3226 3231 autoreadstderr=autoreadstderr)
3227 3232
3228 3233 elif path:
3229 3234 # We bypass hg.peer() so we can proxy the sockets.
3230 3235 # TODO consider not doing this because we skip
3231 3236 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3232 3237 u = util.url(path)
3233 3238 if u.scheme != 'http':
3234 3239 raise error.Abort(_('only http:// paths are currently supported'))
3235 3240
3236 3241 url, authinfo = u.authinfo()
3237 3242 openerargs = {
3238 3243 r'useragent': b'Mercurial debugwireproto',
3239 3244 }
3240 3245
3241 3246 # Turn pipes/sockets into observers so we can log I/O.
3242 3247 if ui.verbose:
3243 3248 openerargs.update({
3244 3249 r'loggingfh': ui,
3245 3250 r'loggingname': b's',
3246 3251 r'loggingopts': {
3247 3252 r'logdata': True,
3248 3253 r'logdataapis': False,
3249 3254 },
3250 3255 })
3251 3256
3252 3257 if ui.debugflag:
3253 3258 openerargs[r'loggingopts'][r'logdataapis'] = True
3254 3259
3255 3260 # Don't send default headers when in raw mode. This allows us to
3256 3261 # bypass most of the behavior of our URL handling code so we can
3257 3262 # have near complete control over what's sent on the wire.
3258 3263 if opts['peer'] == 'raw':
3259 3264 openerargs[r'sendaccept'] = False
3260 3265
3261 3266 opener = urlmod.opener(ui, authinfo, **openerargs)
3262 3267
3263 3268 if opts['peer'] == 'http2':
3264 3269 ui.write(_('creating http peer for wire protocol version 2\n'))
3265 3270 # We go through makepeer() because we need an API descriptor for
3266 3271 # the peer instance to be useful.
3267 3272 with ui.configoverride({
3268 3273 ('experimental', 'httppeer.advertise-v2'): True}):
3269 3274 if opts['nologhandshake']:
3270 3275 ui.pushbuffer()
3271 3276
3272 3277 peer = httppeer.makepeer(ui, path, opener=opener)
3273 3278
3274 3279 if opts['nologhandshake']:
3275 3280 ui.popbuffer()
3276 3281
3277 3282 if not isinstance(peer, httppeer.httpv2peer):
3278 3283 raise error.Abort(_('could not instantiate HTTP peer for '
3279 3284 'wire protocol version 2'),
3280 3285 hint=_('the server may not have the feature '
3281 3286 'enabled or is not allowing this '
3282 3287 'client version'))
3283 3288
3284 3289 elif opts['peer'] == 'raw':
3285 3290 ui.write(_('using raw connection to peer\n'))
3286 3291 peer = None
3287 3292 elif opts['peer']:
3288 3293 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3289 3294 opts['peer'])
3290 3295 else:
3291 3296 peer = httppeer.makepeer(ui, path, opener=opener)
3292 3297
3293 3298 # We /could/ populate stdin/stdout with sock.makefile()...
3294 3299 else:
3295 3300 raise error.Abort(_('unsupported connection configuration'))
3296 3301
3297 3302 batchedcommands = None
3298 3303
3299 3304 # Now perform actions based on the parsed wire language instructions.
3300 3305 for action, lines in blocks:
3301 3306 if action in ('raw', 'raw+'):
3302 3307 if not stdin:
3303 3308 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3304 3309
3305 3310 # Concatenate the data together.
3306 3311 data = ''.join(l.lstrip() for l in lines)
3307 3312 data = stringutil.unescapestr(data)
3308 3313 stdin.write(data)
3309 3314
3310 3315 if action == 'raw+':
3311 3316 stdin.flush()
3312 3317 elif action == 'flush':
3313 3318 if not stdin:
3314 3319 raise error.Abort(_('cannot call flush on this peer'))
3315 3320 stdin.flush()
3316 3321 elif action.startswith('command'):
3317 3322 if not peer:
3318 3323 raise error.Abort(_('cannot send commands unless peer instance '
3319 3324 'is available'))
3320 3325
3321 3326 command = action.split(' ', 1)[1]
3322 3327
3323 3328 args = {}
3324 3329 for line in lines:
3325 3330 # We need to allow empty values.
3326 3331 fields = line.lstrip().split(' ', 1)
3327 3332 if len(fields) == 1:
3328 3333 key = fields[0]
3329 3334 value = ''
3330 3335 else:
3331 3336 key, value = fields
3332 3337
3333 3338 if value.startswith('eval:'):
3334 3339 value = stringutil.evalpythonliteral(value[5:])
3335 3340 else:
3336 3341 value = stringutil.unescapestr(value)
3337 3342
3338 3343 args[key] = value
3339 3344
3340 3345 if batchedcommands is not None:
3341 3346 batchedcommands.append((command, args))
3342 3347 continue
3343 3348
3344 3349 ui.status(_('sending %s command\n') % command)
3345 3350
3346 3351 if 'PUSHFILE' in args:
3347 3352 with open(args['PUSHFILE'], r'rb') as fh:
3348 3353 del args['PUSHFILE']
3349 3354 res, output = peer._callpush(command, fh,
3350 3355 **pycompat.strkwargs(args))
3351 3356 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3352 3357 ui.status(_('remote output: %s\n') %
3353 3358 stringutil.escapestr(output))
3354 3359 else:
3355 3360 with peer.commandexecutor() as e:
3356 3361 res = e.callcommand(command, args).result()
3357 3362
3358 3363 if isinstance(res, wireprotov2peer.commandresponse):
3359 3364 val = res.objects()
3360 3365 ui.status(_('response: %s\n') %
3361 3366 stringutil.pprint(val, bprefix=True, indent=2))
3362 3367 else:
3363 3368 ui.status(_('response: %s\n') %
3364 3369 stringutil.pprint(res, bprefix=True, indent=2))
3365 3370
3366 3371 elif action == 'batchbegin':
3367 3372 if batchedcommands is not None:
3368 3373 raise error.Abort(_('nested batchbegin not allowed'))
3369 3374
3370 3375 batchedcommands = []
3371 3376 elif action == 'batchsubmit':
3372 3377 # There is a batching API we could go through. But it would be
3373 3378 # difficult to normalize requests into function calls. It is easier
3374 3379 # to bypass this layer and normalize to commands + args.
3375 3380 ui.status(_('sending batch with %d sub-commands\n') %
3376 3381 len(batchedcommands))
3377 3382 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3378 3383 ui.status(_('response #%d: %s\n') %
3379 3384 (i, stringutil.escapestr(chunk)))
3380 3385
3381 3386 batchedcommands = None
3382 3387
3383 3388 elif action.startswith('httprequest '):
3384 3389 if not opener:
3385 3390 raise error.Abort(_('cannot use httprequest without an HTTP '
3386 3391 'peer'))
3387 3392
3388 3393 request = action.split(' ', 2)
3389 3394 if len(request) != 3:
3390 3395 raise error.Abort(_('invalid httprequest: expected format is '
3391 3396 '"httprequest <method> <path>"'))
3392 3397
3393 3398 method, httppath = request[1:]
3394 3399 headers = {}
3395 3400 body = None
3396 3401 frames = []
3397 3402 for line in lines:
3398 3403 line = line.lstrip()
3399 3404 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3400 3405 if m:
3401 3406 # Headers need to use native strings.
3402 3407 key = pycompat.strurl(m.group(1))
3403 3408 value = pycompat.strurl(m.group(2))
3404 3409 headers[key] = value
3405 3410 continue
3406 3411
3407 3412 if line.startswith(b'BODYFILE '):
3408 3413 with open(line.split(b' ', 1)[1], 'rb') as fh:
3409 3414 body = fh.read()
3410 3415 elif line.startswith(b'frame '):
3411 3416 frame = wireprotoframing.makeframefromhumanstring(
3412 3417 line[len(b'frame '):])
3413 3418
3414 3419 frames.append(frame)
3415 3420 else:
3416 3421 raise error.Abort(_('unknown argument to httprequest: %s') %
3417 3422 line)
3418 3423
3419 3424 url = path + httppath
3420 3425
3421 3426 if frames:
3422 3427 body = b''.join(bytes(f) for f in frames)
3423 3428
3424 3429 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3425 3430
3426 3431 # urllib.Request insists on using has_data() as a proxy for
3427 3432 # determining the request method. Override that to use our
3428 3433 # explicitly requested method.
3429 3434 req.get_method = lambda: pycompat.sysstr(method)
3430 3435
3431 3436 try:
3432 3437 res = opener.open(req)
3433 3438 body = res.read()
3434 3439 except util.urlerr.urlerror as e:
3435 3440 # read() method must be called, but only exists in Python 2
3436 3441 getattr(e, 'read', lambda: None)()
3437 3442 continue
3438 3443
3439 3444 ct = res.headers.get(r'Content-Type')
3440 3445 if ct == r'application/mercurial-cbor':
3441 3446 ui.write(_('cbor> %s\n') %
3442 3447 stringutil.pprint(cborutil.decodeall(body),
3443 3448 bprefix=True,
3444 3449 indent=2))
3445 3450
3446 3451 elif action == 'close':
3447 3452 peer.close()
3448 3453 elif action == 'readavailable':
3449 3454 if not stdout or not stderr:
3450 3455 raise error.Abort(_('readavailable not available on this peer'))
3451 3456
3452 3457 stdin.close()
3453 3458 stdout.read()
3454 3459 stderr.read()
3455 3460
3456 3461 elif action == 'readline':
3457 3462 if not stdout:
3458 3463 raise error.Abort(_('readline not available on this peer'))
3459 3464 stdout.readline()
3460 3465 elif action == 'ereadline':
3461 3466 if not stderr:
3462 3467 raise error.Abort(_('ereadline not available on this peer'))
3463 3468 stderr.readline()
3464 3469 elif action.startswith('read '):
3465 3470 count = int(action.split(' ', 1)[1])
3466 3471 if not stdout:
3467 3472 raise error.Abort(_('read not available on this peer'))
3468 3473 stdout.read(count)
3469 3474 elif action.startswith('eread '):
3470 3475 count = int(action.split(' ', 1)[1])
3471 3476 if not stderr:
3472 3477 raise error.Abort(_('eread not available on this peer'))
3473 3478 stderr.read(count)
3474 3479 else:
3475 3480 raise error.Abort(_('unknown action: %s') % action)
3476 3481
3477 3482 if batchedcommands is not None:
3478 3483 raise error.Abort(_('unclosed "batchbegin" request'))
3479 3484
3480 3485 if peer:
3481 3486 peer.close()
3482 3487
3483 3488 if proc:
3484 3489 proc.kill()