##// END OF EJS Templates
debugcommands: support wrapping long lines...
Gregory Szorc -
r40210:64360202 default
parent child Browse files
Show More
@@ -1,3379 +1,3388
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 dagparser,
42 42 encoding,
43 43 error,
44 44 exchange,
45 45 extensions,
46 46 filemerge,
47 47 filesetlang,
48 48 formatter,
49 49 hg,
50 50 httppeer,
51 51 localrepo,
52 52 lock as lockmod,
53 53 logcmdutil,
54 54 merge as mergemod,
55 55 obsolete,
56 56 obsutil,
57 57 phases,
58 58 policy,
59 59 pvec,
60 60 pycompat,
61 61 registrar,
62 62 repair,
63 63 revlog,
64 64 revset,
65 65 revsetlang,
66 66 scmutil,
67 67 setdiscovery,
68 68 simplemerge,
69 69 sshpeer,
70 70 sslutil,
71 71 streamclone,
72 72 templater,
73 73 treediscovery,
74 74 upgrade,
75 75 url as urlmod,
76 76 util,
77 77 vfs as vfsmod,
78 78 wireprotoframing,
79 79 wireprotoserver,
80 80 wireprotov2peer,
81 81 )
82 82 from .utils import (
83 83 cborutil,
84 84 dateutil,
85 85 procutil,
86 86 stringutil,
87 87 )
88 88
89 89 from .revlogutils import (
90 90 deltas as deltautil
91 91 )
92 92
93 93 release = lockmod.release
94 94
95 95 command = registrar.command()
96 96
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three arguments: an explicit revlog index file plus two revisions.
    # Two arguments: look the revisions up in the current repo's changelog.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             index)
        lookup = rlog.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(anc), hex(anc)))
115 115
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle at the given path and replay it into the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
122 122
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
     ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
     ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Only an empty repository may be populated this way.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # first pass: determine number of revs in DAG (for the progress bar
    # and to pre-size the mergeable file)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    # second pass: actually create the revisions, inside one transaction
    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1            # rev number of the last node created
        atbranch = 'default'
        nodeids = []       # maps DAG id -> commit node, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file's lines
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag the lines owned by this rev so merges stay clean
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second parent
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))
270 270
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup ``gen``, indented by ``indent``.

    With ``all`` set, list every delta (changelog, manifest, then each
    filelog section); otherwise print only the changelog node hashes.
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # one section header, then one line per delta in the section
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelog headers repeat until an empty dict sentinel is returned
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
299 299
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # marker format we cannot decode: report the version and size only
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output matches that command
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
322 322
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    # Decode the binary phase-heads payload and emit one
    # "<node> <phasename>" line per head, at the requested indent.
    prefix = ' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in decoded[phase]:
            ui.write(prefix)
            ui.write('%s %s\n' % (hex(head), phasename))
331 331
def _quasirepr(thing):
    """Return a repr-like rendering of ``thing``.

    Mapping types are printed with their keys sorted so the output is
    deterministic regardless of insertion order.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
337 337
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when one or more types were given
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # decode the payloads of known part types unless --quiet
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
360 360
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec: report only the bundlespec, without reading the payload
        if spec:
            ui.write('%s\n' % exchange.getbundlespec(ui, fh))
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
379 379
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # top-level wire protocol capabilities first
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write(('  %s\n') % cap)
    # then the bundle2 capability tree, if the peer advertises one
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for value in values:
                ui.write(('    %s\n') % value)
398 398
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # check dirstate entries against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # check that every manifest file is tracked by the dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            # NOTE(review): this message has no trailing newline upstream
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Don't shadow the 'error' module here: the original bound the
        # message to a local named 'error' and then called error.Abort()
        # on that bytes object, raising AttributeError instead of Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
426 426
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
437 437
def _debugdisplaycolor(ui):
    """Print each available color name, rendered in that color."""
    # Work on a copy so we can replace the style table without affecting
    # the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, _value in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))

    def sortkey(item):
        # sort label with a '_' after the other to group '_background' entry.
        return ('_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
455 455
def _debugdisplaystyle(ui):
    """List every configured style with its effects, aligned in columns."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad the effect lists so they line up under the longest label
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            padding = max(0, width - len(label))
            ui.write(' ' * padding)
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
469 469
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
487 487
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit revlog index file: emit its DAG
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield 'n' (node) events; label requested revs as "rN"
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: walk the changelog of the current repository
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
550 550
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    store = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        node = store.lookup(rev)
        ui.write(store.revision(node, raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
566 566
@command('debugdate',
        [('e', 'extended', None, _('try extended date formats'))],
        _('[-e] DATE [RANGE]'),
        norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the extended date formats
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
582 582
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used below: e[1] compressed size,
        # e[2] uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify the delta by which revision it is based on
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta, deltas are always against the
            # previous revision (or the rev is a full-text base)
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # chain ids are assigned in order of first appearance of each base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # measure the actual read pattern the sparse-read code would use
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
734 734
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
     ('', 'dates', True, _('display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent fields as used here: [0] state char, [1] mode bits,
        # [2] size, [3] mtime (-1 means unknown)
        if ent[3] == -1:
            # NOTE(review): literals padded to the 20-column width of the
            # strftime output below; the extraction collapsed the padding
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the recorded mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        # --old selects the legacy tree-walking discovery protocol;
        # the default path uses set-based discovery
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # reduce the common set to its heads
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
822 822
823 823 _chunksize = 4 << 10
824 824
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is written to stdout unless -o/--output names a
    destination file.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, "wb", _chunksize)
        try:
            # copy in fixed-size chunks so large downloads stay bounded
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # the original leaked the source handle; always close it
        fh.close()
846 846
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate third-party extensions not tested with this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
892 892
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # the expression goes through these transformation stages, in order;
    # each stage's intermediate tree can be dumped via --show-stage=NAME
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the expression through every stage, printing the tree for the
    # stages selected above
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # build the set of candidate file names the matcher will be tested
    # against: all files from all revisions (--all-files), the working
    # directory contents, or the files of the selected revision
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    # print each candidate file the compiled matcher accepts
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
959 959
@command('debugformat',
    [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # first column must be wide enough for both the header text and the
    # longest variant name so the value columns line up
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' followed by padding so every row is the same width
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # render booleans as yes/no for humans; pass strings through
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # structured formatters (json, template) get the raw value
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels highlighting disagreement between the repo's actual
        # format, the configured value, and the Mercurial default
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1021 1021
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a boolean capability probe the way this command reports it
        return 'yes' if flag else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probing case sensitivity requires creating a temporary file, which
    # can fail (e.g. unwritable directory); report '(unknown)' in that case
    sensitivity = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            sensitivity = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % sensitivity)
1038 1038
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # assemble the keyword arguments for getbundle(); keys carry the r''
    # prefix because they are passed through ** and must be native strings
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # map the user-facing compression name onto an on-disk bundle type
    typemap = {
        'none': 'HG10UN',
        'bzip2': 'HG10BZ',
        'gzip': 'HG10GZ',
        'bundle2': 'HG20',
    }
    bundletype = typemap.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                # a file is ignored either directly, or because one of its
                # parent directories matched an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                # ignoredata is (ignore file, line number, pattern text)
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1115 1115
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    # --debug shows full hashes, otherwise abbreviated ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the rendered width of a node id from the first revision
    # (all ids render the same width); default to 12 for an empty store
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1151 1151
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # one "parent -> child" edge per non-null parent of each revision
    for rev in store:
        parents = store.parents(store.node(rev))
        ui.write("\t%d -> %d\n" % (store.rev(parents[0]), rev))
        # the second parent only exists for merge revisions
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write("}\n")
1166 1166
@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index once before asking for stats; presumably this
    # forces the native index to be loaded so the counters are populated
    # (NOTE: inferred from call order — confirm against index impl)
    repo.changelog.shortest(nullid, 1)
    stats = repo.changelog.index.stats()
    for key in sorted(stats):
        ui.write('%s: %s\n' % (key, stats[key]))
1173 1173
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # write contents to a new temp file and return its path
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # count of detected problems; also used as the command's return code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # verify the C extension modules can actually be imported
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the implicit default editor: missing 'vi' gets a gentler
    # message and does not count as a problem
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1347 1347
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # render each answer as "1" (known) or "0" (unknown), in input order
    digits = ["1" if known else "0" for known in peer.known(nodes)]
    ui.write("%s\n" % "".join(digits))
1361 1361
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept as a thin alias so old completion scripts keep working
    debugnamecomplete(ui, repo, *args)
1366 1366
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: blindly delete the lock file(s) and exit
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire the lock(s) non-blocking and hold them
    # until the user answers the prompt; released in the finally below
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report the state of each lock
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock, so it was free; release it again
            l.release()
        else:
            # somebody else holds the lock: report owner, pid, host and age
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user or b'None', pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1463 1463
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    # mutations and inspection both happen under the repo lock
    with repo.lock():
        r = repo.manifestlog.getstorage(b'')
        try:
            cache = r._fulltextcache
        except AttributeError:
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read() # stores revision in cache too

        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1510 1510
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: driver command + its state
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record; fields are NUL-separated
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras record: filename + alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # merge labels record: local, other and (optionally) base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                # unknown record type: dump it raw, with NULs made visible
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # record types listed in 'order' sort first (in that order);
        # everything else sorts afterwards, by record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1609 1609
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # the 'branches' namespace is skipped here because we only want to
    # offer *open* branches; those are collected from the branchmap below
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1629 1629
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # convert a full hex node id to binary, aborting on malformed input
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # deletion mode: --delete INDEX...
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker PRECURSOR -> SUCCESSORS
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1746 1746
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Resolve the spec to a path relative to the repository root,
        # bailing out if it does not live inside the repository.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        for fn, ent in dirstate.iteritems():
            # skip entries outside the spec or filtered out by state
            if not fn.startswith(spec) or ent[0] not in acceptable:
                continue
            if fixpaths:
                fn = fn.replace('/', pycompat.ossep)
            if fullpaths:
                files.add(fn)
                continue
            sep = fn.find(pycompat.ossep, speclen)
            if sep >= 0:
                # completion stops at the next path separator
                dirs.add(fn[:sep])
            else:
                files.add(fn)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1811 1811
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is turned on unconditionally; the log lines
    # are only rendered when --debug is in effect.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1830 1830
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes precedence over everything else by forcing
    # ui.forcemerge for the duration of the examination
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # report (with -v) the other tool-selection inputs that are set
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # swallow _picktool's chatter unless --debug asked for it
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                    fctx.isbinary(),
                                                    'l' in fctx.flags(),
                                                    changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1909 1909
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)

    # Listing mode: no key/old/new given, dump the whole namespace.
    if not keyinfo:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
        return

    # Update mode: move the key from old to new and report the outcome.
    key, old, new = keyinfo
    with target.commandexecutor() as e:
        r = e.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(r) + '\n')
    return not r
1937 1937
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec data for two revisions and their relation

    Prints the parent vector of each revision, both depths, the
    delta/hamming distance and the computed relation between the two
    vectors.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Bug fix: previously 'rel' was left unbound when none of the
        # relations above held, raising UnboundLocalError in the final
        # ui.write below. Report an explicit unknown relation instead.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1958 1958
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            mfiles = set(ctx.manifest().keys())
            dsfiles = set(dirstate)
            # files in the manifest but unknown to the dirstate
            manifestonly = mfiles - dsfiles
            # dirstate-only files, except those marked as added
            dsonly = dsfiles - mfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1996 1996
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All the work happens in repair.rebuildfncache(); this command is
    # only the CLI entry point.
    repair.rebuildfncache(ui, repo)
2001 2001
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(abspath)
        if renamed:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
2019 2019
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # --dump: raw one-line-per-revision table, then exit
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start   end deltastart base   p1   p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision stops being a head once it appears as a parent
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each entry is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold 'size' into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # revision stored standalone (full text or empty)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # revision stored as a delta; extend the parent's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                # classify which revision the delta was computed against
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the chunk identifies the compression engine
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # turn the totals into averages
    # NOTE(review): '/' is true division on Python 3, so these averages
    # become floats; the '%d' formatting below truncates them — confirm
    # that is intended.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
        for depth in snapsizedepth:
            snaptotal[depth] = snapsizedepth[depth][2]
            snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # width of the numeric columns is derived from the largest value
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # value plus its percentage of 'total' (100% when total is falsy)
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    empty     : ') + fmt % pcfmt(numempty, numrevs))
    ui.write(('                   text  : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write(('                   delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write(('    snapshot  : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    snapshot  : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg)    : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write(('    level-%-3d (min/max/avg)          : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                              numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                              numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                              numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2319 2319
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # full node hashes with --debug, abbreviated ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of the node column, taken from the first entry
        idlen = len(shortfn(r.node(i)))
        break

    # column headers; layout depends on --format and verbosity
    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if lookup fails
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2384 2384
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # transformation pipeline: each stage consumes the previous stage's tree
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stage trees to print: always, or only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping every intermediate tree for --verify-optimized
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2487 2487
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive log destinations
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        # NOTE(review): buffering=1 requests line buffering, which Python 3
        # does not support for binary-mode files — confirm whether falling
        # back to full buffering is acceptable here.
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2524 2524
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # resolve both revisions before taking the working-copy lock
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(p1, p2)
2542 2542
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Only schemes with a well-known port can be probed.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_("only https and ssh connections are supported"))

    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_("malformed port number in URL"))

    from . import win32

    # Certificate verification is intentionally disabled: we want the raw
    # peer certificate so Windows can build/repair the chain itself.
    sock = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                           cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        if win32.checkcertificatechain(cert, build=False):
            ui.status(_('full certificate chain is available\n'))
        else:
            ui.status(_('certificate chain is incomplete, updating... '))

            if win32.checkcertificatechain(cert):
                ui.status(_('done.\n'))
            else:
                ui.status(_('failed.\n'))
    finally:
        sock.close()
2604 2604
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (source and pinned revision) of a changeset.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2615 2615
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across successorssets() calls so work done for one
    # revision can be reused for the next.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(' '.join(short(node) for node in succsset))
            ui.write('\n')
2668 2668
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Turn -D KEY=VALUE definitions into template properties. 'ui' is
    # reserved and an empty key is invalid.
    props = {}
    for define in opts[r'define']:
        try:
            key, value = (part.strip() for part in define.split('=', 1))
            if not key or key == 'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % define)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    def showsymbols(t):
        # Report which keywords and functions the template references.
        kwds, funcs = t.symbolsuseddefault()
        ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
        ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2725 2725
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fixed typo: was 'respose'; now matches debuguiprompt's 'response' label.
    ui.write(('response: %s\n') % r)
2733 2733
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo back whatever the user typed at the prompt.
    ui.write(('response: %s\n') % ui.prompt(prompt))
2741 2741
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock while refreshing caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2747 2747
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin wrapper: analysis and the optional in-place upgrade both live in
    # the upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2772 2772
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest absolute and relative paths so
    # the output lines up. ('fname' avoids shadowing the abs() builtin, and
    # generator expressions avoid building throwaway lists.)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(m.rel(fname)) for fname in items))
    for fname in items:
        line = fmt % (fname, f(m.rel(fname)), m.exact(fname) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2793 2793
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get('divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)".
            dnodes = ' '.join('%s (%s)' % (c.hex(), c.phasestr())
                              for c in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2804 2804
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing through the wire protocol.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command arguments remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Keep only arguments that were explicitly provided.
    args = pycompat.strkwargs({k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2828 2828
2829 2829 def _parsewirelangblocks(fh):
2830 2830 activeaction = None
2831 2831 blocklines = []
2832 lastindent = 0
2832 2833
2833 2834 for line in fh:
2834 2835 line = line.rstrip()
2835 2836 if not line:
2836 2837 continue
2837 2838
2838 2839 if line.startswith(b'#'):
2839 2840 continue
2840 2841
2841 2842 if not line.startswith(b' '):
2842 2843 # New block. Flush previous one.
2843 2844 if activeaction:
2844 2845 yield activeaction, blocklines
2845 2846
2846 2847 activeaction = line
2847 2848 blocklines = []
2849 lastindent = 0
2848 2850 continue
2849 2851
2850 2852 # Else we start with an indent.
2851 2853
2852 2854 if not activeaction:
2853 2855 raise error.Abort(_('indented line outside of block'))
2854 2856
2855 blocklines.append(line)
2857 indent = len(line) - len(line.lstrip())
2858
2859 # If this line is indented more than the last line, concatenate it.
2860 if indent > lastindent and blocklines:
2861 blocklines[-1] += line.lstrip()
2862 else:
2863 blocklines.append(line)
2864 lastindent = indent
2856 2865
2857 2866 # Flush last block.
2858 2867 if activeaction:
2859 2868 yield activeaction, blocklines
2860 2869
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True, indent=2))
                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True, indent=2))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fixed: previously the whole result of split() (a list)
                    # was passed to open(), which raises TypeError; the
                    # filename is the second field.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body),
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now