debugcommands: use our CBOR decoder...
Gregory Szorc
r39480:e5eb67de default
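
This changeset drops the vendored thirdparty.cbor import from debugcommands.py in favor of Mercurial's in-tree utils.cborutil decoder (see the import hunk below). The call-site updates themselves fall outside the hunks shown here; the following is only a rough sketch of the kind of substitution the import swap implies, assuming the vendored module was used via cbor.loads() and that cborutil.decodeall() returns a list of every top-level CBOR value:

    # Hypothetical illustration only, not part of this diff's hunks.
    # Before: decode a single CBOR value with the vendored package.
    from .thirdparty import cbor
    value = cbor.loads(data)

    # After: decode with the in-tree decoder; decodeall() is assumed to
    # return all top-level values, so callers pick the element(s) they need.
    from .utils import cborutil
    values = cborutil.decodeall(data)
    value = values[0]
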
@@ -1,3365 +1,3364 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 from .thirdparty import (
36 cbor,
37 )
38 35 from . import (
39 36 bundle2,
40 37 changegroup,
41 38 cmdutil,
42 39 color,
43 40 context,
44 41 dagparser,
45 42 encoding,
46 43 error,
47 44 exchange,
48 45 extensions,
49 46 filemerge,
50 47 filesetlang,
51 48 formatter,
52 49 hg,
53 50 httppeer,
54 51 localrepo,
55 52 lock as lockmod,
56 53 logcmdutil,
57 54 merge as mergemod,
58 55 obsolete,
59 56 obsutil,
60 57 phases,
61 58 policy,
62 59 pvec,
63 60 pycompat,
64 61 registrar,
65 62 repair,
66 63 revlog,
67 64 revset,
68 65 revsetlang,
69 66 scmutil,
70 67 setdiscovery,
71 68 simplemerge,
72 69 sshpeer,
73 70 sslutil,
74 71 streamclone,
75 72 templater,
76 73 treediscovery,
77 74 upgrade,
78 75 url as urlmod,
79 76 util,
80 77 vfs as vfsmod,
81 78 wireprotoframing,
82 79 wireprotoserver,
83 80 wireprotov2peer,
84 81 )
85 82 from .utils import (
83 cborutil,
86 84 dateutil,
87 85 procutil,
88 86 stringutil,
89 87 )
90 88
91 89 from .revlogutils import (
92 90 deltas as deltautil
93 91 )
94 92
95 93 release = lockmod.release
96 94
97 95 command = registrar.command()
98 96
99 97 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
100 98 def debugancestor(ui, repo, *args):
101 99 """find the ancestor revision of two revisions in a given index"""
102 100 if len(args) == 3:
103 101 index, rev1, rev2 = args
104 102 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
105 103 lookup = r.lookup
106 104 elif len(args) == 2:
107 105 if not repo:
108 106 raise error.Abort(_('there is no Mercurial repository here '
109 107 '(.hg not found)'))
110 108 rev1, rev2 = args
111 109 r = repo.changelog
112 110 lookup = repo.lookup
113 111 else:
114 112 raise error.Abort(_('either two or three arguments required'))
115 113 a = r.ancestor(lookup(rev1), lookup(rev2))
116 114 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
117 115
118 116 @command('debugapplystreamclonebundle', [], 'FILE')
119 117 def debugapplystreamclonebundle(ui, repo, fname):
120 118 """apply a stream clone bundle file"""
121 119 f = hg.openpath(ui, fname)
122 120 gen = exchange.readbundle(ui, f, fname)
123 121 gen.apply(repo)
124 122
125 123 @command('debugbuilddag',
126 124 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
127 125 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
128 126 ('n', 'new-file', None, _('add new file at each rev'))],
129 127 _('[OPTION]... [TEXT]'))
130 128 def debugbuilddag(ui, repo, text=None,
131 129 mergeable_file=False,
132 130 overwritten_file=False,
133 131 new_file=False):
134 132 """builds a repo with a given DAG from scratch in the current empty repo
135 133
136 134 The description of the DAG is read from stdin if not given on the
137 135 command line.
138 136
139 137 Elements:
140 138
141 139 - "+n" is a linear run of n nodes based on the current default parent
142 140 - "." is a single node based on the current default parent
143 141 - "$" resets the default parent to null (implied at the start);
144 142 otherwise the default parent is always the last node created
145 143 - "<p" sets the default parent to the backref p
146 144 - "*p" is a fork at parent p, which is a backref
147 145 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
148 146 - "/p2" is a merge of the preceding node and p2
149 147 - ":tag" defines a local tag for the preceding node
150 148 - "@branch" sets the named branch for subsequent nodes
151 149 - "#...\\n" is a comment up to the end of the line
152 150
153 151 Whitespace between the above elements is ignored.
154 152
155 153 A backref is either
156 154
157 155 - a number n, which references the node curr-n, where curr is the current
158 156 node, or
159 157 - the name of a local tag you placed earlier using ":tag", or
160 158 - empty to denote the default parent.
161 159
162 160 All string valued-elements are either strictly alphanumeric, or must
163 161 be enclosed in double quotes ("..."), with "\\" as escape character.
164 162 """
165 163
166 164 if text is None:
167 165 ui.status(_("reading DAG from stdin\n"))
168 166 text = ui.fin.read()
169 167
170 168 cl = repo.changelog
171 169 if len(cl) > 0:
172 170 raise error.Abort(_('repository is not empty'))
173 171
174 172 # determine number of revs in DAG
175 173 total = 0
176 174 for type, data in dagparser.parsedag(text):
177 175 if type == 'n':
178 176 total += 1
179 177
180 178 if mergeable_file:
181 179 linesperrev = 2
182 180 # make a file with k lines per rev
183 181 initialmergedlines = ['%d' % i
184 182 for i in pycompat.xrange(0, total * linesperrev)]
185 183 initialmergedlines.append("")
186 184
187 185 tags = []
188 186 progress = ui.makeprogress(_('building'), unit=_('revisions'),
189 187 total=total)
190 188 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
191 189 at = -1
192 190 atbranch = 'default'
193 191 nodeids = []
194 192 id = 0
195 193 progress.update(id)
196 194 for type, data in dagparser.parsedag(text):
197 195 if type == 'n':
198 196 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 197 id, ps = data
200 198
201 199 files = []
202 200 filecontent = {}
203 201
204 202 p2 = None
205 203 if mergeable_file:
206 204 fn = "mf"
207 205 p1 = repo[ps[0]]
208 206 if len(ps) > 1:
209 207 p2 = repo[ps[1]]
210 208 pa = p1.ancestor(p2)
211 209 base, local, other = [x[fn].data() for x in (pa, p1,
212 210 p2)]
213 211 m3 = simplemerge.Merge3Text(base, local, other)
214 212 ml = [l.strip() for l in m3.merge_lines()]
215 213 ml.append("")
216 214 elif at > 0:
217 215 ml = p1[fn].data().split("\n")
218 216 else:
219 217 ml = initialmergedlines
220 218 ml[id * linesperrev] += " r%i" % id
221 219 mergedtext = "\n".join(ml)
222 220 files.append(fn)
223 221 filecontent[fn] = mergedtext
224 222
225 223 if overwritten_file:
226 224 fn = "of"
227 225 files.append(fn)
228 226 filecontent[fn] = "r%i\n" % id
229 227
230 228 if new_file:
231 229 fn = "nf%i" % id
232 230 files.append(fn)
233 231 filecontent[fn] = "r%i\n" % id
234 232 if len(ps) > 1:
235 233 if not p2:
236 234 p2 = repo[ps[1]]
237 235 for fn in p2:
238 236 if fn.startswith("nf"):
239 237 files.append(fn)
240 238 filecontent[fn] = p2[fn].data()
241 239
242 240 def fctxfn(repo, cx, path):
243 241 if path in filecontent:
244 242 return context.memfilectx(repo, cx, path,
245 243 filecontent[path])
246 244 return None
247 245
248 246 if len(ps) == 0 or ps[0] < 0:
249 247 pars = [None, None]
250 248 elif len(ps) == 1:
251 249 pars = [nodeids[ps[0]], None]
252 250 else:
253 251 pars = [nodeids[p] for p in ps]
254 252 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 253 date=(id, 0),
256 254 user="debugbuilddag",
257 255 extra={'branch': atbranch})
258 256 nodeid = repo.commitctx(cx)
259 257 nodeids.append(nodeid)
260 258 at = id
261 259 elif type == 'l':
262 260 id, name = data
263 261 ui.note(('tag %s\n' % name))
264 262 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 263 elif type == 'a':
266 264 ui.note(('branch %s\n' % data))
267 265 atbranch = data
268 266 progress.update(id)
269 267
270 268 if tags:
271 269 repo.vfs.write("localtags", "".join(tags))
272 270
273 271 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
274 272 indent_string = ' ' * indent
275 273 if all:
276 274 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
277 275 % indent_string)
278 276
279 277 def showchunks(named):
280 278 ui.write("\n%s%s\n" % (indent_string, named))
281 279 for deltadata in gen.deltaiter():
282 280 node, p1, p2, cs, deltabase, delta, flags = deltadata
283 281 ui.write("%s%s %s %s %s %s %d\n" %
284 282 (indent_string, hex(node), hex(p1), hex(p2),
285 283 hex(cs), hex(deltabase), len(delta)))
286 284
287 285 chunkdata = gen.changelogheader()
288 286 showchunks("changelog")
289 287 chunkdata = gen.manifestheader()
290 288 showchunks("manifest")
291 289 for chunkdata in iter(gen.filelogheader, {}):
292 290 fname = chunkdata['filename']
293 291 showchunks(fname)
294 292 else:
295 293 if isinstance(gen, bundle2.unbundle20):
296 294 raise error.Abort(_('use debugbundle2 for this file'))
297 295 chunkdata = gen.changelogheader()
298 296 for deltadata in gen.deltaiter():
299 297 node, p1, p2, cs, deltabase, delta, flags = deltadata
300 298 ui.write("%s%s\n" % (indent_string, hex(node)))
301 299
302 300 def _debugobsmarkers(ui, part, indent=0, **opts):
303 301 """display version and markers contained in 'data'"""
304 302 opts = pycompat.byteskwargs(opts)
305 303 data = part.read()
306 304 indent_string = ' ' * indent
307 305 try:
308 306 version, markers = obsolete._readmarkers(data)
309 307 except error.UnknownVersion as exc:
310 308 msg = "%sunsupported version: %s (%d bytes)\n"
311 309 msg %= indent_string, exc.version, len(data)
312 310 ui.write(msg)
313 311 else:
314 312 msg = "%sversion: %d (%d bytes)\n"
315 313 msg %= indent_string, version, len(data)
316 314 ui.write(msg)
317 315 fm = ui.formatter('debugobsolete', opts)
318 316 for rawmarker in sorted(markers):
319 317 m = obsutil.marker(None, rawmarker)
320 318 fm.startitem()
321 319 fm.plain(indent_string)
322 320 cmdutil.showmarker(fm, m)
323 321 fm.end()
324 322
325 323 def _debugphaseheads(ui, data, indent=0):
326 324 """display version and markers contained in 'data'"""
327 325 indent_string = ' ' * indent
328 326 headsbyphase = phases.binarydecode(data)
329 327 for phase in phases.allphases:
330 328 for head in headsbyphase[phase]:
331 329 ui.write(indent_string)
332 330 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
333 331
334 332 def _quasirepr(thing):
335 333 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
336 334 return '{%s}' % (
337 335 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
338 336 return pycompat.bytestr(repr(thing))
339 337
340 338 def _debugbundle2(ui, gen, all=None, **opts):
341 339 """lists the contents of a bundle2"""
342 340 if not isinstance(gen, bundle2.unbundle20):
343 341 raise error.Abort(_('not a bundle2 file'))
344 342 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
345 343 parttypes = opts.get(r'part_type', [])
346 344 for part in gen.iterparts():
347 345 if parttypes and part.type not in parttypes:
348 346 continue
349 347 msg = '%s -- %s (mandatory: %r)\n'
350 348 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
351 349 if part.type == 'changegroup':
352 350 version = part.params.get('version', '01')
353 351 cg = changegroup.getunbundler(version, part, 'UN')
354 352 if not ui.quiet:
355 353 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
356 354 if part.type == 'obsmarkers':
357 355 if not ui.quiet:
358 356 _debugobsmarkers(ui, part, indent=4, **opts)
359 357 if part.type == 'phase-heads':
360 358 if not ui.quiet:
361 359 _debugphaseheads(ui, part, indent=4)
362 360
363 361 @command('debugbundle',
364 362 [('a', 'all', None, _('show all details')),
365 363 ('', 'part-type', [], _('show only the named part type')),
366 364 ('', 'spec', None, _('print the bundlespec of the bundle'))],
367 365 _('FILE'),
368 366 norepo=True)
369 367 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
370 368 """lists the contents of a bundle"""
371 369 with hg.openpath(ui, bundlepath) as f:
372 370 if spec:
373 371 spec = exchange.getbundlespec(ui, f)
374 372 ui.write('%s\n' % spec)
375 373 return
376 374
377 375 gen = exchange.readbundle(ui, f, bundlepath)
378 376 if isinstance(gen, bundle2.unbundle20):
379 377 return _debugbundle2(ui, gen, all=all, **opts)
380 378 _debugchangegroup(ui, gen, all=all, **opts)
381 379
382 380 @command('debugcapabilities',
383 381 [], _('PATH'),
384 382 norepo=True)
385 383 def debugcapabilities(ui, path, **opts):
386 384 """lists the capabilities of a remote peer"""
387 385 opts = pycompat.byteskwargs(opts)
388 386 peer = hg.peer(ui, opts, path)
389 387 caps = peer.capabilities()
390 388 ui.write(('Main capabilities:\n'))
391 389 for c in sorted(caps):
392 390 ui.write((' %s\n') % c)
393 391 b2caps = bundle2.bundle2caps(peer)
394 392 if b2caps:
395 393 ui.write(('Bundle2 capabilities:\n'))
396 394 for key, values in sorted(b2caps.iteritems()):
397 395 ui.write((' %s\n') % key)
398 396 for v in values:
399 397 ui.write((' %s\n') % v)
400 398
401 399 @command('debugcheckstate', [], '')
402 400 def debugcheckstate(ui, repo):
403 401 """validate the correctness of the current dirstate"""
404 402 parent1, parent2 = repo.dirstate.parents()
405 403 m1 = repo[parent1].manifest()
406 404 m2 = repo[parent2].manifest()
407 405 errors = 0
408 406 for f in repo.dirstate:
409 407 state = repo.dirstate[f]
410 408 if state in "nr" and f not in m1:
411 409 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
412 410 errors += 1
413 411 if state in "a" and f in m1:
414 412 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
415 413 errors += 1
416 414 if state in "m" and f not in m1 and f not in m2:
417 415 ui.warn(_("%s in state %s, but not in either manifest\n") %
418 416 (f, state))
419 417 errors += 1
420 418 for f in m1:
421 419 state = repo.dirstate[f]
422 420 if state not in "nrm":
423 421 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
424 422 errors += 1
425 423 if errors:
426 424 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
427 425 raise error.Abort(errstr)
428 426
429 427 @command('debugcolor',
430 428 [('', 'style', None, _('show all configured styles'))],
431 429 'hg debugcolor')
432 430 def debugcolor(ui, repo, **opts):
433 431 """show available color, effects or style"""
434 432 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
435 433 if opts.get(r'style'):
436 434 return _debugdisplaystyle(ui)
437 435 else:
438 436 return _debugdisplaycolor(ui)
439 437
440 438 def _debugdisplaycolor(ui):
441 439 ui = ui.copy()
442 440 ui._styles.clear()
443 441 for effect in color._activeeffects(ui).keys():
444 442 ui._styles[effect] = effect
445 443 if ui._terminfoparams:
446 444 for k, v in ui.configitems('color'):
447 445 if k.startswith('color.'):
448 446 ui._styles[k] = k[6:]
449 447 elif k.startswith('terminfo.'):
450 448 ui._styles[k] = k[9:]
451 449 ui.write(_('available colors:\n'))
452 450 # sort label with a '_' after the other to group '_background' entry.
453 451 items = sorted(ui._styles.items(),
454 452 key=lambda i: ('_' in i[0], i[0], i[1]))
455 453 for colorname, label in items:
456 454 ui.write(('%s\n') % colorname, label=label)
457 455
458 456 def _debugdisplaystyle(ui):
459 457 ui.write(_('available styles:\n'))
460 458 if not ui._styles:
461 459 return
462 460 width = max(len(s) for s in ui._styles)
463 461 for label, effects in sorted(ui._styles.items()):
464 462 ui.write('%s' % label, label=label)
465 463 if effects:
466 464 # 50
467 465 ui.write(': ')
468 466 ui.write(' ' * (max(0, width - len(label))))
469 467 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
470 468 ui.write('\n')
471 469
472 470 @command('debugcreatestreamclonebundle', [], 'FILE')
473 471 def debugcreatestreamclonebundle(ui, repo, fname):
474 472 """create a stream clone bundle file
475 473
476 474 Stream bundles are special bundles that are essentially archives of
477 475 revlog files. They are commonly used for cloning very quickly.
478 476 """
479 477 # TODO we may want to turn this into an abort when this functionality
480 478 # is moved into `hg bundle`.
481 479 if phases.hassecret(repo):
482 480 ui.warn(_('(warning: stream clone bundle will contain secret '
483 481 'revisions)\n'))
484 482
485 483 requirements, gen = streamclone.generatebundlev1(repo)
486 484 changegroup.writechunks(ui, gen, fname)
487 485
488 486 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
489 487
490 488 @command('debugdag',
491 489 [('t', 'tags', None, _('use tags as labels')),
492 490 ('b', 'branches', None, _('annotate with branch names')),
493 491 ('', 'dots', None, _('use dots for runs')),
494 492 ('s', 'spaces', None, _('separate elements by spaces'))],
495 493 _('[OPTION]... [FILE [REV]...]'),
496 494 optionalrepo=True)
497 495 def debugdag(ui, repo, file_=None, *revs, **opts):
498 496 """format the changelog or an index DAG as a concise textual description
499 497
500 498 If you pass a revlog index, the revlog's DAG is emitted. If you list
501 499 revision numbers, they get labeled in the output as rN.
502 500
503 501 Otherwise, the changelog DAG of the current repo is emitted.
504 502 """
505 503 spaces = opts.get(r'spaces')
506 504 dots = opts.get(r'dots')
507 505 if file_:
508 506 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
509 507 file_)
510 508 revs = set((int(r) for r in revs))
511 509 def events():
512 510 for r in rlog:
513 511 yield 'n', (r, list(p for p in rlog.parentrevs(r)
514 512 if p != -1))
515 513 if r in revs:
516 514 yield 'l', (r, "r%i" % r)
517 515 elif repo:
518 516 cl = repo.changelog
519 517 tags = opts.get(r'tags')
520 518 branches = opts.get(r'branches')
521 519 if tags:
522 520 labels = {}
523 521 for l, n in repo.tags().items():
524 522 labels.setdefault(cl.rev(n), []).append(l)
525 523 def events():
526 524 b = "default"
527 525 for r in cl:
528 526 if branches:
529 527 newb = cl.read(cl.node(r))[5]['branch']
530 528 if newb != b:
531 529 yield 'a', newb
532 530 b = newb
533 531 yield 'n', (r, list(p for p in cl.parentrevs(r)
534 532 if p != -1))
535 533 if tags:
536 534 ls = labels.get(r)
537 535 if ls:
538 536 for l in ls:
539 537 yield 'l', (r, l)
540 538 else:
541 539 raise error.Abort(_('need repo for changelog dag'))
542 540
543 541 for line in dagparser.dagtextlines(events(),
544 542 addspaces=spaces,
545 543 wraplabels=True,
546 544 wrapannotations=True,
547 545 wrapnonlinear=dots,
548 546 usedots=dots,
549 547 maxlinewidth=70):
550 548 ui.write(line)
551 549 ui.write("\n")
552 550
553 551 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
554 552 def debugdata(ui, repo, file_, rev=None, **opts):
555 553 """dump the contents of a data file revision"""
556 554 opts = pycompat.byteskwargs(opts)
557 555 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
558 556 if rev is not None:
559 557 raise error.CommandError('debugdata', _('invalid arguments'))
560 558 file_, rev = None, file_
561 559 elif rev is None:
562 560 raise error.CommandError('debugdata', _('invalid arguments'))
563 561 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
564 562 try:
565 563 ui.write(r.revision(r.lookup(rev), raw=True))
566 564 except KeyError:
567 565 raise error.Abort(_('invalid revision identifier %s') % rev)
568 566
569 567 @command('debugdate',
570 568 [('e', 'extended', None, _('try extended date formats'))],
571 569 _('[-e] DATE [RANGE]'),
572 570 norepo=True, optionalrepo=True)
573 571 def debugdate(ui, date, range=None, **opts):
574 572 """parse and display a date"""
575 573 if opts[r"extended"]:
576 574 d = dateutil.parsedate(date, util.extendeddateformats)
577 575 else:
578 576 d = dateutil.parsedate(date)
579 577 ui.write(("internal: %d %d\n") % d)
580 578 ui.write(("standard: %s\n") % dateutil.datestr(d))
581 579 if range:
582 580 m = dateutil.matchdate(range)
583 581 ui.write(("match: %s\n") % m(d[0]))
584 582
585 583 @command('debugdeltachain',
586 584 cmdutil.debugrevlogopts + cmdutil.formatteropts,
587 585 _('-c|-m|FILE'),
588 586 optionalrepo=True)
589 587 def debugdeltachain(ui, repo, file_=None, **opts):
590 588 """dump information about delta chains in a revlog
591 589
592 590 Output can be templatized. Available template keywords are:
593 591
594 592 :``rev``: revision number
595 593 :``chainid``: delta chain identifier (numbered by unique base)
596 594 :``chainlen``: delta chain length to this revision
597 595 :``prevrev``: previous revision in delta chain
598 596 :``deltatype``: role of delta / how it was computed
599 597 :``compsize``: compressed size of revision
600 598 :``uncompsize``: uncompressed size of revision
601 599 :``chainsize``: total size of compressed revisions in chain
602 600 :``chainratio``: total chain size divided by uncompressed revision size
603 601 (new delta chains typically start at ratio 2.00)
604 602 :``lindist``: linear distance from base revision in delta chain to end
605 603 of this revision
606 604 :``extradist``: total size of revisions not part of this delta chain from
607 605 base of delta chain to end of this revision; a measurement
608 606 of how much extra data we need to read/seek across to read
609 607 the delta chain for this revision
610 608 :``extraratio``: extradist divided by chainsize; another representation of
611 609 how much unrelated data is needed to load this delta chain
612 610
613 611 If the repository is configured to use the sparse read, additional keywords
614 612 are available:
615 613
616 614 :``readsize``: total size of data read from the disk for a revision
617 615 (sum of the sizes of all the blocks)
618 616 :``largestblock``: size of the largest block of data read from the disk
619 617 :``readdensity``: density of useful bytes in the data read from the disk
620 618 :``srchunks``: in how many data hunks the whole revision would be read
621 619
622 620 The sparse read can be enabled with experimental.sparse-read = True
623 621 """
624 622 opts = pycompat.byteskwargs(opts)
625 623 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
626 624 index = r.index
627 625 start = r.start
628 626 length = r.length
629 627 generaldelta = r.version & revlog.FLAG_GENERALDELTA
630 628 withsparseread = getattr(r, '_withsparseread', False)
631 629
632 630 def revinfo(rev):
633 631 e = index[rev]
634 632 compsize = e[1]
635 633 uncompsize = e[2]
636 634 chainsize = 0
637 635
638 636 if generaldelta:
639 637 if e[3] == e[5]:
640 638 deltatype = 'p1'
641 639 elif e[3] == e[6]:
642 640 deltatype = 'p2'
643 641 elif e[3] == rev - 1:
644 642 deltatype = 'prev'
645 643 elif e[3] == rev:
646 644 deltatype = 'base'
647 645 else:
648 646 deltatype = 'other'
649 647 else:
650 648 if e[3] == rev:
651 649 deltatype = 'base'
652 650 else:
653 651 deltatype = 'prev'
654 652
655 653 chain = r._deltachain(rev)[0]
656 654 for iterrev in chain:
657 655 e = index[iterrev]
658 656 chainsize += e[1]
659 657
660 658 return compsize, uncompsize, deltatype, chain, chainsize
661 659
662 660 fm = ui.formatter('debugdeltachain', opts)
663 661
664 662 fm.plain(' rev chain# chainlen prev delta '
665 663 'size rawsize chainsize ratio lindist extradist '
666 664 'extraratio')
667 665 if withsparseread:
668 666 fm.plain(' readsize largestblk rddensity srchunks')
669 667 fm.plain('\n')
670 668
671 669 chainbases = {}
672 670 for rev in r:
673 671 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
674 672 chainbase = chain[0]
675 673 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
676 674 basestart = start(chainbase)
677 675 revstart = start(rev)
678 676 lineardist = revstart + comp - basestart
679 677 extradist = lineardist - chainsize
680 678 try:
681 679 prevrev = chain[-2]
682 680 except IndexError:
683 681 prevrev = -1
684 682
685 683 if uncomp != 0:
686 684 chainratio = float(chainsize) / float(uncomp)
687 685 else:
688 686 chainratio = chainsize
689 687
690 688 if chainsize != 0:
691 689 extraratio = float(extradist) / float(chainsize)
692 690 else:
693 691 extraratio = extradist
694 692
695 693 fm.startitem()
696 694 fm.write('rev chainid chainlen prevrev deltatype compsize '
697 695 'uncompsize chainsize chainratio lindist extradist '
698 696 'extraratio',
699 697 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
700 698 rev, chainid, len(chain), prevrev, deltatype, comp,
701 699 uncomp, chainsize, chainratio, lineardist, extradist,
702 700 extraratio,
703 701 rev=rev, chainid=chainid, chainlen=len(chain),
704 702 prevrev=prevrev, deltatype=deltatype, compsize=comp,
705 703 uncompsize=uncomp, chainsize=chainsize,
706 704 chainratio=chainratio, lindist=lineardist,
707 705 extradist=extradist, extraratio=extraratio)
708 706 if withsparseread:
709 707 readsize = 0
710 708 largestblock = 0
711 709 srchunks = 0
712 710
713 711 for revschunk in deltautil.slicechunk(r, chain):
714 712 srchunks += 1
715 713 blkend = start(revschunk[-1]) + length(revschunk[-1])
716 714 blksize = blkend - start(revschunk[0])
717 715
718 716 readsize += blksize
719 717 if largestblock < blksize:
720 718 largestblock = blksize
721 719
722 720 if readsize:
723 721 readdensity = float(chainsize) / float(readsize)
724 722 else:
725 723 readdensity = 1
726 724
727 725 fm.write('readsize largestblock readdensity srchunks',
728 726 ' %10d %10d %9.5f %8d',
729 727 readsize, largestblock, readdensity, srchunks,
730 728 readsize=readsize, largestblock=largestblock,
731 729 readdensity=readdensity, srchunks=srchunks)
732 730
733 731 fm.plain('\n')
734 732
735 733 fm.end()
736 734
737 735 @command('debugdirstate|debugstate',
738 736 [('', 'nodates', None, _('do not display the saved mtime')),
739 737 ('', 'datesort', None, _('sort by saved mtime'))],
740 738 _('[OPTION]...'))
741 739 def debugstate(ui, repo, **opts):
742 740 """show the contents of the current dirstate"""
743 741
744 742 nodates = opts.get(r'nodates')
745 743 datesort = opts.get(r'datesort')
746 744
747 745 timestr = ""
748 746 if datesort:
749 747 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
750 748 else:
751 749 keyfunc = None # sort by filename
752 750 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
753 751 if ent[3] == -1:
754 752 timestr = 'unset '
755 753 elif nodates:
756 754 timestr = 'set '
757 755 else:
758 756 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
759 757 time.localtime(ent[3]))
760 758 timestr = encoding.strtolocal(timestr)
761 759 if ent[1] & 0o20000:
762 760 mode = 'lnk'
763 761 else:
764 762 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
765 763 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
766 764 for f in repo.dirstate.copies():
767 765 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
768 766
769 767 @command('debugdiscovery',
770 768 [('', 'old', None, _('use old-style discovery')),
771 769 ('', 'nonheads', None,
772 770 _('use old-style discovery with non-heads included')),
773 771 ('', 'rev', [], 'restrict discovery to this set of revs'),
774 772 ] + cmdutil.remoteopts,
775 773 _('[--rev REV] [OTHER]'))
776 774 def debugdiscovery(ui, repo, remoteurl="default", **opts):
777 775 """runs the changeset discovery protocol in isolation"""
778 776 opts = pycompat.byteskwargs(opts)
779 777 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
780 778 remote = hg.peer(repo, opts, remoteurl)
781 779 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
782 780
783 781 # make sure tests are repeatable
784 782 random.seed(12323)
785 783
786 784 def doit(pushedrevs, remoteheads, remote=remote):
787 785 if opts.get('old'):
788 786 if not util.safehasattr(remote, 'branches'):
789 787 # enable in-client legacy support
790 788 remote = localrepo.locallegacypeer(remote.local())
791 789 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
792 790 force=True)
793 791 common = set(common)
794 792 if not opts.get('nonheads'):
795 793 ui.write(("unpruned common: %s\n") %
796 794 " ".join(sorted(short(n) for n in common)))
797 795
798 796 clnode = repo.changelog.node
799 797 common = repo.revs('heads(::%ln)', common)
800 798 common = {clnode(r) for r in common}
801 799 else:
802 800 nodes = None
803 801 if pushedrevs:
804 802 revs = scmutil.revrange(repo, pushedrevs)
805 803 nodes = [repo[r].node() for r in revs]
806 804 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
807 805 ancestorsof=nodes)
808 806 common = set(common)
809 807 rheads = set(hds)
810 808 lheads = set(repo.heads())
811 809 ui.write(("common heads: %s\n") %
812 810 " ".join(sorted(short(n) for n in common)))
813 811 if lheads <= common:
814 812 ui.write(("local is subset\n"))
815 813 elif rheads <= common:
816 814 ui.write(("remote is subset\n"))
817 815
818 816 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
819 817 localrevs = opts['rev']
820 818 doit(localrevs, remoterevs)
821 819
822 820 _chunksize = 4 << 10
823 821
824 822 @command('debugdownload',
825 823 [
826 824 ('o', 'output', '', _('path')),
827 825 ],
828 826 optionalrepo=True)
829 827 def debugdownload(ui, repo, url, output=None, **opts):
830 828 """download a resource using Mercurial logic and config
831 829 """
832 830 fh = urlmod.open(ui, url, output)
833 831
834 832 dest = ui
835 833 if output:
836 834 dest = open(output, "wb", _chunksize)
837 835 try:
838 836 data = fh.read(_chunksize)
839 837 while data:
840 838 dest.write(data)
841 839 data = fh.read(_chunksize)
842 840 finally:
843 841 if output:
844 842 dest.close()
845 843
846 844 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
847 845 def debugextensions(ui, repo, **opts):
848 846 '''show information about active extensions'''
849 847 opts = pycompat.byteskwargs(opts)
850 848 exts = extensions.extensions(ui)
851 849 hgver = util.version()
852 850 fm = ui.formatter('debugextensions', opts)
853 851 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
854 852 isinternal = extensions.ismoduleinternal(extmod)
855 853 extsource = pycompat.fsencode(extmod.__file__)
856 854 if isinternal:
857 855 exttestedwith = [] # never expose magic string to users
858 856 else:
859 857 exttestedwith = getattr(extmod, 'testedwith', '').split()
860 858 extbuglink = getattr(extmod, 'buglink', None)
861 859
862 860 fm.startitem()
863 861
864 862 if ui.quiet or ui.verbose:
865 863 fm.write('name', '%s\n', extname)
866 864 else:
867 865 fm.write('name', '%s', extname)
868 866 if isinternal or hgver in exttestedwith:
869 867 fm.plain('\n')
870 868 elif not exttestedwith:
871 869 fm.plain(_(' (untested!)\n'))
872 870 else:
873 871 lasttestedversion = exttestedwith[-1]
874 872 fm.plain(' (%s!)\n' % lasttestedversion)
875 873
876 874 fm.condwrite(ui.verbose and extsource, 'source',
877 875 _(' location: %s\n'), extsource or "")
878 876
879 877 if ui.verbose:
880 878 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
881 879 fm.data(bundled=isinternal)
882 880
883 881 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
884 882 _(' tested with: %s\n'),
885 883 fm.formatlist(exttestedwith, name='ver'))
886 884
887 885 fm.condwrite(ui.verbose and extbuglink, 'buglink',
888 886 _(' bug reporting: %s\n'), extbuglink or "")
889 887
890 888 fm.end()
891 889
892 890 @command('debugfileset',
893 891 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
894 892 ('', 'all-files', False,
895 893 _('test files from all revisions and working directory')),
896 894 ('s', 'show-matcher', None,
897 895 _('print internal representation of matcher')),
898 896 ('p', 'show-stage', [],
899 897 _('print parsed tree at the given stage'), _('NAME'))],
900 898 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
901 899 def debugfileset(ui, repo, expr, **opts):
902 900 '''parse and apply a fileset specification'''
903 901 from . import fileset
904 902 fileset.symbols # force import of fileset so we have predicates to optimize
905 903 opts = pycompat.byteskwargs(opts)
906 904 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
907 905
908 906 stages = [
909 907 ('parsed', pycompat.identity),
910 908 ('analyzed', filesetlang.analyze),
911 909 ('optimized', filesetlang.optimize),
912 910 ]
913 911 stagenames = set(n for n, f in stages)
914 912
915 913 showalways = set()
916 914 if ui.verbose and not opts['show_stage']:
917 915 # show parsed tree by --verbose (deprecated)
918 916 showalways.add('parsed')
919 917 if opts['show_stage'] == ['all']:
920 918 showalways.update(stagenames)
921 919 else:
922 920 for n in opts['show_stage']:
923 921 if n not in stagenames:
924 922 raise error.Abort(_('invalid stage name: %s') % n)
925 923 showalways.update(opts['show_stage'])
926 924
927 925 tree = filesetlang.parse(expr)
928 926 for n, f in stages:
929 927 tree = f(tree)
930 928 if n in showalways:
931 929 if opts['show_stage'] or n != 'parsed':
932 930 ui.write(("* %s:\n") % n)
933 931 ui.write(filesetlang.prettyformat(tree), "\n")
934 932
935 933 files = set()
936 934 if opts['all_files']:
937 935 for r in repo:
938 936 c = repo[r]
939 937 files.update(c.files())
940 938 files.update(c.substate)
941 939 if opts['all_files'] or ctx.rev() is None:
942 940 wctx = repo[None]
943 941 files.update(repo.dirstate.walk(scmutil.matchall(repo),
944 942 subrepos=list(wctx.substate),
945 943 unknown=True, ignored=True))
946 944 files.update(wctx.substate)
947 945 else:
948 946 files.update(ctx.files())
949 947 files.update(ctx.substate)
950 948
951 949 m = ctx.matchfileset(expr)
952 950 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
953 951 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
954 952 for f in sorted(files):
955 953 if not m(f):
956 954 continue
957 955 ui.write("%s\n" % f)
958 956
959 957 @command('debugformat',
960 958 [] + cmdutil.formatteropts)
961 959 def debugformat(ui, repo, **opts):
962 960 """display format information about the current repository
963 961
964 962 Use --verbose to get extra information about current config value and
965 963 Mercurial default."""
966 964 opts = pycompat.byteskwargs(opts)
967 965 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
968 966 maxvariantlength = max(len('format-variant'), maxvariantlength)
969 967
970 968 def makeformatname(name):
971 969 return '%s:' + (' ' * (maxvariantlength - len(name)))
972 970
973 971 fm = ui.formatter('debugformat', opts)
974 972 if fm.isplain():
975 973 def formatvalue(value):
976 974 if util.safehasattr(value, 'startswith'):
977 975 return value
978 976 if value:
979 977 return 'yes'
980 978 else:
981 979 return 'no'
982 980 else:
983 981 formatvalue = pycompat.identity
984 982
985 983 fm.plain('format-variant')
986 984 fm.plain(' ' * (maxvariantlength - len('format-variant')))
987 985 fm.plain(' repo')
988 986 if ui.verbose:
989 987 fm.plain(' config default')
990 988 fm.plain('\n')
991 989 for fv in upgrade.allformatvariant:
992 990 fm.startitem()
993 991 repovalue = fv.fromrepo(repo)
994 992 configvalue = fv.fromconfig(repo)
995 993
996 994 if repovalue != configvalue:
997 995 namelabel = 'formatvariant.name.mismatchconfig'
998 996 repolabel = 'formatvariant.repo.mismatchconfig'
999 997 elif repovalue != fv.default:
1000 998 namelabel = 'formatvariant.name.mismatchdefault'
1001 999 repolabel = 'formatvariant.repo.mismatchdefault'
1002 1000 else:
1003 1001 namelabel = 'formatvariant.name.uptodate'
1004 1002 repolabel = 'formatvariant.repo.uptodate'
1005 1003
1006 1004 fm.write('name', makeformatname(fv.name), fv.name,
1007 1005 label=namelabel)
1008 1006 fm.write('repo', ' %3s', formatvalue(repovalue),
1009 1007 label=repolabel)
1010 1008 if fv.default != configvalue:
1011 1009 configlabel = 'formatvariant.config.special'
1012 1010 else:
1013 1011 configlabel = 'formatvariant.config.default'
1014 1012 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1015 1013 label=configlabel)
1016 1014 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1017 1015 label='formatvariant.default')
1018 1016 fm.plain('\n')
1019 1017 fm.end()
1020 1018
1021 1019 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1022 1020 def debugfsinfo(ui, path="."):
1023 1021 """show information detected about current filesystem"""
1024 1022 ui.write(('path: %s\n') % path)
1025 1023 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1026 1024 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1027 1025 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1028 1026 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1029 1027 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1030 1028 casesensitive = '(unknown)'
1031 1029 try:
1032 1030 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1033 1031 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1034 1032 except OSError:
1035 1033 pass
1036 1034 ui.write(('case-sensitive: %s\n') % casesensitive)
1037 1035
1038 1036 @command('debuggetbundle',
1039 1037 [('H', 'head', [], _('id of head node'), _('ID')),
1040 1038 ('C', 'common', [], _('id of common node'), _('ID')),
1041 1039 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1042 1040 _('REPO FILE [-H|-C ID]...'),
1043 1041 norepo=True)
1044 1042 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1045 1043 """retrieves a bundle from a repo
1046 1044
1047 1045 Every ID must be a full-length hex node id string. Saves the bundle to the
1048 1046 given file.
1049 1047 """
1050 1048 opts = pycompat.byteskwargs(opts)
1051 1049 repo = hg.peer(ui, opts, repopath)
1052 1050 if not repo.capable('getbundle'):
1053 1051 raise error.Abort("getbundle() not supported by target repository")
1054 1052 args = {}
1055 1053 if common:
1056 1054 args[r'common'] = [bin(s) for s in common]
1057 1055 if head:
1058 1056 args[r'heads'] = [bin(s) for s in head]
1059 1057 # TODO: get desired bundlecaps from command line.
1060 1058 args[r'bundlecaps'] = None
1061 1059 bundle = repo.getbundle('debug', **args)
1062 1060
1063 1061 bundletype = opts.get('type', 'bzip2').lower()
1064 1062 btypes = {'none': 'HG10UN',
1065 1063 'bzip2': 'HG10BZ',
1066 1064 'gzip': 'HG10GZ',
1067 1065 'bundle2': 'HG20'}
1068 1066 bundletype = btypes.get(bundletype)
1069 1067 if bundletype not in bundle2.bundletypes:
1070 1068 raise error.Abort(_('unknown bundle type specified with --type'))
1071 1069 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1072 1070
1073 1071 @command('debugignore', [], '[FILE]')
1074 1072 def debugignore(ui, repo, *files, **opts):
1075 1073 """display the combined ignore pattern and information about ignored files
1076 1074
1077 1075 With no argument display the combined ignore pattern.
1078 1076
1079 1077 Given space separated file names, shows if the given file is ignored and
1080 1078 if so, show the ignore rule (file and line number) that matched it.
1081 1079 """
1082 1080 ignore = repo.dirstate._ignore
1083 1081 if not files:
1084 1082 # Show all the patterns
1085 1083 ui.write("%s\n" % pycompat.byterepr(ignore))
1086 1084 else:
1087 1085 m = scmutil.match(repo[None], pats=files)
1088 1086 for f in m.files():
1089 1087 nf = util.normpath(f)
1090 1088 ignored = None
1091 1089 ignoredata = None
1092 1090 if nf != '.':
1093 1091 if ignore(nf):
1094 1092 ignored = nf
1095 1093 ignoredata = repo.dirstate._ignorefileandline(nf)
1096 1094 else:
1097 1095 for p in util.finddirs(nf):
1098 1096 if ignore(p):
1099 1097 ignored = p
1100 1098 ignoredata = repo.dirstate._ignorefileandline(p)
1101 1099 break
1102 1100 if ignored:
1103 1101 if ignored == nf:
1104 1102 ui.write(_("%s is ignored\n") % m.uipath(f))
1105 1103 else:
1106 1104 ui.write(_("%s is ignored because of "
1107 1105 "containing folder %s\n")
1108 1106 % (m.uipath(f), ignored))
1109 1107 ignorefile, lineno, line = ignoredata
1110 1108 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1111 1109 % (ignorefile, lineno, line))
1112 1110 else:
1113 1111 ui.write(_("%s is not ignored\n") % m.uipath(f))
1114 1112
1115 1113 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1116 1114 _('-c|-m|FILE'))
1117 1115 def debugindex(ui, repo, file_=None, **opts):
1118 1116 """dump index data for a storage primitive"""
1119 1117 opts = pycompat.byteskwargs(opts)
1120 1118 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1121 1119
1122 1120 if ui.debugflag:
1123 1121 shortfn = hex
1124 1122 else:
1125 1123 shortfn = short
1126 1124
1127 1125 idlen = 12
1128 1126 for i in store:
1129 1127 idlen = len(shortfn(store.node(i)))
1130 1128 break
1131 1129
1132 1130 fm = ui.formatter('debugindex', opts)
1133 1131 fm.plain(b' rev linkrev %s %s p2\n' % (
1134 1132 b'nodeid'.ljust(idlen),
1135 1133 b'p1'.ljust(idlen)))
1136 1134
1137 1135 for rev in store:
1138 1136 node = store.node(rev)
1139 1137 parents = store.parents(node)
1140 1138
1141 1139 fm.startitem()
1142 1140 fm.write(b'rev', b'%6d ', rev)
1143 1141 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1144 1142 fm.write(b'node', '%s ', shortfn(node))
1145 1143 fm.write(b'p1', '%s ', shortfn(parents[0]))
1146 1144 fm.write(b'p2', '%s', shortfn(parents[1]))
1147 1145 fm.plain(b'\n')
1148 1146
1149 1147 fm.end()
1150 1148
1151 1149 @command('debugindexdot', cmdutil.debugrevlogopts,
1152 1150 _('-c|-m|FILE'), optionalrepo=True)
1153 1151 def debugindexdot(ui, repo, file_=None, **opts):
1154 1152 """dump an index DAG as a graphviz dot file"""
1155 1153 opts = pycompat.byteskwargs(opts)
1156 1154 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1157 1155 ui.write(("digraph G {\n"))
1158 1156 for i in r:
1159 1157 node = r.node(i)
1160 1158 pp = r.parents(node)
1161 1159 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1162 1160 if pp[1] != nullid:
1163 1161 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1164 1162 ui.write("}\n")
1165 1163
1166 1164 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1167 1165 def debuginstall(ui, **opts):
1168 1166 '''test Mercurial installation
1169 1167
1170 1168 Returns 0 on success.
1171 1169 '''
1172 1170 opts = pycompat.byteskwargs(opts)
1173 1171
1174 1172 def writetemp(contents):
1175 1173 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1176 1174 f = os.fdopen(fd, r"wb")
1177 1175 f.write(contents)
1178 1176 f.close()
1179 1177 return name
1180 1178
1181 1179 problems = 0
1182 1180
1183 1181 fm = ui.formatter('debuginstall', opts)
1184 1182 fm.startitem()
1185 1183
1186 1184 # encoding
1187 1185 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1188 1186 err = None
1189 1187 try:
1190 1188 codecs.lookup(pycompat.sysstr(encoding.encoding))
1191 1189 except LookupError as inst:
1192 1190 err = stringutil.forcebytestr(inst)
1193 1191 problems += 1
1194 1192 fm.condwrite(err, 'encodingerror', _(" %s\n"
1195 1193 " (check that your locale is properly set)\n"), err)
1196 1194
1197 1195 # Python
1198 1196 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1199 1197 pycompat.sysexecutable)
1200 1198 fm.write('pythonver', _("checking Python version (%s)\n"),
1201 1199 ("%d.%d.%d" % sys.version_info[:3]))
1202 1200 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1203 1201 os.path.dirname(pycompat.fsencode(os.__file__)))
1204 1202
1205 1203 security = set(sslutil.supportedprotocols)
1206 1204 if sslutil.hassni:
1207 1205 security.add('sni')
1208 1206
1209 1207 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1210 1208 fm.formatlist(sorted(security), name='protocol',
1211 1209 fmt='%s', sep=','))
1212 1210
1213 1211 # These are warnings, not errors. So don't increment problem count. This
1214 1212 # may change in the future.
1215 1213 if 'tls1.2' not in security:
1216 1214 fm.plain(_(' TLS 1.2 not supported by Python install; '
1217 1215 'network connections lack modern security\n'))
1218 1216 if 'sni' not in security:
1219 1217 fm.plain(_(' SNI not supported by Python install; may have '
1220 1218 'connectivity issues with some servers\n'))
1221 1219
1222 1220 # TODO print CA cert info
1223 1221
1224 1222 # hg version
1225 1223 hgver = util.version()
1226 1224 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1227 1225 hgver.split('+')[0])
1228 1226 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1229 1227 '+'.join(hgver.split('+')[1:]))
1230 1228
1231 1229 # compiled modules
1232 1230 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1233 1231 policy.policy)
1234 1232 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1235 1233 os.path.dirname(pycompat.fsencode(__file__)))
1236 1234
1237 1235 if policy.policy in ('c', 'allow'):
1238 1236 err = None
1239 1237 try:
1240 1238 from .cext import (
1241 1239 base85,
1242 1240 bdiff,
1243 1241 mpatch,
1244 1242 osutil,
1245 1243 )
1246 1244 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1247 1245 except Exception as inst:
1248 1246 err = stringutil.forcebytestr(inst)
1249 1247 problems += 1
1250 1248 fm.condwrite(err, 'extensionserror', " %s\n", err)
1251 1249
1252 1250 compengines = util.compengines._engines.values()
1253 1251 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1254 1252 fm.formatlist(sorted(e.name() for e in compengines),
1255 1253 name='compengine', fmt='%s', sep=', '))
1256 1254 fm.write('compenginesavail', _('checking available compression engines '
1257 1255 '(%s)\n'),
1258 1256 fm.formatlist(sorted(e.name() for e in compengines
1259 1257 if e.available()),
1260 1258 name='compengine', fmt='%s', sep=', '))
1261 1259 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1262 1260 fm.write('compenginesserver', _('checking available compression engines '
1263 1261 'for wire protocol (%s)\n'),
1264 1262 fm.formatlist([e.name() for e in wirecompengines
1265 1263 if e.wireprotosupport()],
1266 1264 name='compengine', fmt='%s', sep=', '))
1267 1265 re2 = 'missing'
1268 1266 if util._re2:
1269 1267 re2 = 'available'
1270 1268 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1271 1269 fm.data(re2=bool(util._re2))
1272 1270
1273 1271 # templates
1274 1272 p = templater.templatepaths()
1275 1273 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1276 1274 fm.condwrite(not p, '', _(" no template directories found\n"))
1277 1275 if p:
1278 1276 m = templater.templatepath("map-cmdline.default")
1279 1277 if m:
1280 1278 # template found, check if it is working
1281 1279 err = None
1282 1280 try:
1283 1281 templater.templater.frommapfile(m)
1284 1282 except Exception as inst:
1285 1283 err = stringutil.forcebytestr(inst)
1286 1284 p = None
1287 1285 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1288 1286 else:
1289 1287 p = None
1290 1288 fm.condwrite(p, 'defaulttemplate',
1291 1289 _("checking default template (%s)\n"), m)
1292 1290 fm.condwrite(not m, 'defaulttemplatenotfound',
1293 1291 _(" template '%s' not found\n"), "default")
1294 1292 if not p:
1295 1293 problems += 1
1296 1294 fm.condwrite(not p, '',
1297 1295 _(" (templates seem to have been installed incorrectly)\n"))
1298 1296
1299 1297 # editor
1300 1298 editor = ui.geteditor()
1301 1299 editor = util.expandpath(editor)
1302 1300 editorbin = procutil.shellsplit(editor)[0]
1303 1301 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1304 1302 cmdpath = procutil.findexe(editorbin)
1305 1303 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1306 1304 _(" No commit editor set and can't find %s in PATH\n"
1307 1305 " (specify a commit editor in your configuration"
1308 1306 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1309 1307 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1310 1308 _(" Can't find editor '%s' in PATH\n"
1311 1309 " (specify a commit editor in your configuration"
1312 1310 " file)\n"), not cmdpath and editorbin)
1313 1311 if not cmdpath and editor != 'vi':
1314 1312 problems += 1
1315 1313
1316 1314 # check username
1317 1315 username = None
1318 1316 err = None
1319 1317 try:
1320 1318 username = ui.username()
1321 1319 except error.Abort as e:
1322 1320 err = stringutil.forcebytestr(e)
1323 1321 problems += 1
1324 1322
1325 1323 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1326 1324 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1327 1325 " (specify a username in your configuration file)\n"), err)
1328 1326
1329 1327 fm.condwrite(not problems, '',
1330 1328 _("no problems detected\n"))
1331 1329 if not problems:
1332 1330 fm.data(problems=problems)
1333 1331 fm.condwrite(problems, 'problems',
1334 1332 _("%d problems detected,"
1335 1333 " please check your install!\n"), problems)
1336 1334 fm.end()
1337 1335
1338 1336 return problems
1339 1337
1340 1338 @command('debugknown', [], _('REPO ID...'), norepo=True)
1341 1339 def debugknown(ui, repopath, *ids, **opts):
1342 1340 """test whether node ids are known to a repo
1343 1341
1344 1342 Every ID must be a full-length hex node id string. Returns a list of 0s
1345 1343 and 1s indicating unknown/known.
1346 1344 """
1347 1345 opts = pycompat.byteskwargs(opts)
1348 1346 repo = hg.peer(ui, opts, repopath)
1349 1347 if not repo.capable('known'):
1350 1348 raise error.Abort("known() not supported by target repository")
1351 1349 flags = repo.known([bin(s) for s in ids])
1352 1350 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1353 1351
1354 1352 @command('debuglabelcomplete', [], _('LABEL...'))
1355 1353 def debuglabelcomplete(ui, repo, *args):
1356 1354 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1357 1355 debugnamecomplete(ui, repo, *args)
1358 1356
1359 1357 @command('debuglocks',
1360 1358 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1361 1359 ('W', 'force-wlock', None,
1362 1360 _('free the working state lock (DANGEROUS)')),
1363 1361 ('s', 'set-lock', None, _('set the store lock until stopped')),
1364 1362 ('S', 'set-wlock', None,
1365 1363 _('set the working state lock until stopped'))],
1366 1364 _('[OPTION]...'))
1367 1365 def debuglocks(ui, repo, **opts):
1368 1366 """show or modify state of locks
1369 1367
1370 1368 By default, this command will show which locks are held. This
1371 1369 includes the user and process holding the lock, the amount of time
1372 1370 the lock has been held, and the machine name where the process is
1373 1371 running if it's not local.
1374 1372
1375 1373 Locks protect the integrity of Mercurial's data, so should be
1376 1374 treated with care. System crashes or other interruptions may cause
1377 1375 locks to not be properly released, though Mercurial will usually
1378 1376 detect and remove such stale locks automatically.
1379 1377
1380 1378 However, detecting stale locks may not always be possible (for
1381 1379 instance, on a shared filesystem). Removing locks may also be
1382 1380 blocked by filesystem permissions.
1383 1381
1384 1382 Setting a lock will prevent other commands from changing the data.
1385 1383 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1386 1384 The set locks are removed when the command exits.
1387 1385
1388 1386 Returns 0 if no locks are held.
1389 1387
1390 1388 """
1391 1389
1392 1390 if opts.get(r'force_lock'):
1393 1391 repo.svfs.unlink('lock')
1394 1392 if opts.get(r'force_wlock'):
1395 1393 repo.vfs.unlink('wlock')
1396 1394 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1397 1395 return 0
1398 1396
1399 1397 locks = []
1400 1398 try:
1401 1399 if opts.get(r'set_wlock'):
1402 1400 try:
1403 1401 locks.append(repo.wlock(False))
1404 1402 except error.LockHeld:
1405 1403 raise error.Abort(_('wlock is already held'))
1406 1404 if opts.get(r'set_lock'):
1407 1405 try:
1408 1406 locks.append(repo.lock(False))
1409 1407 except error.LockHeld:
1410 1408 raise error.Abort(_('lock is already held'))
1411 1409 if len(locks):
1412 1410 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1413 1411 return 0
1414 1412 finally:
1415 1413 release(*locks)
1416 1414
1417 1415 now = time.time()
1418 1416 held = 0
1419 1417
1420 1418 def report(vfs, name, method):
1421 1419 # this causes stale locks to get reaped for more accurate reporting
1422 1420 try:
1423 1421 l = method(False)
1424 1422 except error.LockHeld:
1425 1423 l = None
1426 1424
1427 1425 if l:
1428 1426 l.release()
1429 1427 else:
1430 1428 try:
1431 1429 st = vfs.lstat(name)
1432 1430 age = now - st[stat.ST_MTIME]
1433 1431 user = util.username(st.st_uid)
1434 1432 locker = vfs.readlock(name)
1435 1433 if ":" in locker:
1436 1434 host, pid = locker.split(':')
1437 1435 if host == socket.gethostname():
1438 1436 locker = 'user %s, process %s' % (user, pid)
1439 1437 else:
1440 1438 locker = 'user %s, process %s, host %s' \
1441 1439 % (user, pid, host)
1442 1440 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1443 1441 return 1
1444 1442 except OSError as e:
1445 1443 if e.errno != errno.ENOENT:
1446 1444 raise
1447 1445
1448 1446 ui.write(("%-6s free\n") % (name + ":"))
1449 1447 return 0
1450 1448
1451 1449 held += report(repo.svfs, "lock", repo.lock)
1452 1450 held += report(repo.vfs, "wlock", repo.wlock)
1453 1451
1454 1452 return held
1455 1453
1456 1454 @command('debugmanifestfulltextcache', [
1457 1455 ('', 'clear', False, _('clear the cache')),
1458 1456 ('a', 'add', '', _('add the given manifest node to the cache'),
1459 1457 _('NODE'))
1460 1458 ], '')
1461 1459 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1462 1460 """show, clear or amend the contents of the manifest fulltext cache"""
1463 1461 with repo.lock():
1464 1462 r = repo.manifestlog.getstorage(b'')
1465 1463 try:
1466 1464 cache = r._fulltextcache
1467 1465 except AttributeError:
1468 1466 ui.warn(_(
1469 1467 "Current revlog implementation doesn't appear to have a "
1470 1468 'manifest fulltext cache\n'))
1471 1469 return
1472 1470
1473 1471 if opts.get(r'clear'):
1474 1472 cache.clear()
1475 1473
1476 1474 if add:
1477 1475 try:
1478 1476 manifest = repo.manifestlog[r.lookup(add)]
1479 1477 except error.LookupError as e:
1480 1478 raise error.Abort(e, hint="Check your manifest node id")
1481 1479 manifest.read() # stores revision in cache too
1482 1480
1483 1481 if not len(cache):
1484 1482 ui.write(_('Cache empty\n'))
1485 1483 else:
1486 1484 ui.write(
1487 1485 _('Cache contains %d manifest entries, in order of most to '
1488 1486 'least recent:\n') % (len(cache),))
1489 1487 totalsize = 0
1490 1488 for nodeid in cache:
1491 1489 # Use cache.get to not update the LRU order
1492 1490 data = cache.get(nodeid)
1493 1491 size = len(data)
1494 1492 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1495 1493 ui.write(_('id: %s, size %s\n') % (
1496 1494 hex(nodeid), util.bytecount(size)))
1497 1495 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1498 1496 ui.write(
1499 1497 _('Total cache data size %s, on-disk %s\n') % (
1500 1498 util.bytecount(totalsize), util.bytecount(ondisk))
1501 1499 )
1502 1500
1503 1501 @command('debugmergestate', [], '')
1504 1502 def debugmergestate(ui, repo, *args):
1505 1503 """print merge state
1506 1504
1507 1505 Use --verbose to print out information about whether v1 or v2 merge state
1508 1506 was chosen."""
1509 1507 def _hashornull(h):
1510 1508 if h == nullhex:
1511 1509 return 'null'
1512 1510 else:
1513 1511 return h
1514 1512
1515 1513 def printrecords(version):
1516 1514 ui.write(('* version %d records\n') % version)
1517 1515 if version == 1:
1518 1516 records = v1records
1519 1517 else:
1520 1518 records = v2records
1521 1519
1522 1520 for rtype, record in records:
1523 1521 # pretty print some record types
1524 1522 if rtype == 'L':
1525 1523 ui.write(('local: %s\n') % record)
1526 1524 elif rtype == 'O':
1527 1525 ui.write(('other: %s\n') % record)
1528 1526 elif rtype == 'm':
1529 1527 driver, mdstate = record.split('\0', 1)
1530 1528 ui.write(('merge driver: %s (state "%s")\n')
1531 1529 % (driver, mdstate))
1532 1530 elif rtype in 'FDC':
1533 1531 r = record.split('\0')
1534 1532 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1535 1533 if version == 1:
1536 1534 onode = 'not stored in v1 format'
1537 1535 flags = r[7]
1538 1536 else:
1539 1537 onode, flags = r[7:9]
1540 1538 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1541 1539 % (f, rtype, state, _hashornull(hash)))
1542 1540 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1543 1541 ui.write((' ancestor path: %s (node %s)\n')
1544 1542 % (afile, _hashornull(anode)))
1545 1543 ui.write((' other path: %s (node %s)\n')
1546 1544 % (ofile, _hashornull(onode)))
1547 1545 elif rtype == 'f':
1548 1546 filename, rawextras = record.split('\0', 1)
1549 1547 extras = rawextras.split('\0')
1550 1548 i = 0
1551 1549 extrastrings = []
1552 1550 while i < len(extras):
1553 1551 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1554 1552 i += 2
1555 1553
1556 1554 ui.write(('file extras: %s (%s)\n')
1557 1555 % (filename, ', '.join(extrastrings)))
1558 1556 elif rtype == 'l':
1559 1557 labels = record.split('\0', 2)
1560 1558 labels = [l for l in labels if len(l) > 0]
1561 1559 ui.write(('labels:\n'))
1562 1560 ui.write((' local: %s\n' % labels[0]))
1563 1561 ui.write((' other: %s\n' % labels[1]))
1564 1562 if len(labels) > 2:
1565 1563 ui.write((' base: %s\n' % labels[2]))
1566 1564 else:
1567 1565 ui.write(('unrecognized entry: %s\t%s\n')
1568 1566 % (rtype, record.replace('\0', '\t')))
1569 1567
1570 1568 # Avoid mergestate.read() since it may raise an exception for unsupported
1571 1569 # merge state records. We shouldn't be doing this, but this is OK since this
1572 1570 # command is pretty low-level.
1573 1571 ms = mergemod.mergestate(repo)
1574 1572
1575 1573 # sort so that reasonable information is on top
1576 1574 v1records = ms._readrecordsv1()
1577 1575 v2records = ms._readrecordsv2()
1578 1576 order = 'LOml'
1579 1577 def key(r):
1580 1578 idx = order.find(r[0])
1581 1579 if idx == -1:
1582 1580 return (1, r[1])
1583 1581 else:
1584 1582 return (0, idx)
1585 1583 v1records.sort(key=key)
1586 1584 v2records.sort(key=key)
1587 1585
1588 1586 if not v1records and not v2records:
1589 1587 ui.write(('no merge state found\n'))
1590 1588 elif not v2records:
1591 1589 ui.note(('no version 2 merge state\n'))
1592 1590 printrecords(1)
1593 1591 elif ms._v1v2match(v1records, v2records):
1594 1592 ui.note(('v1 and v2 states match: using v2\n'))
1595 1593 printrecords(2)
1596 1594 else:
1597 1595 ui.note(('v1 and v2 states mismatch: using v1\n'))
1598 1596 printrecords(1)
1599 1597 if ui.verbose:
1600 1598 printrecords(2)
1601 1599
1602 1600 @command('debugnamecomplete', [], _('NAME...'))
1603 1601 def debugnamecomplete(ui, repo, *args):
1604 1602 '''complete "names" - tags, open branch names, bookmark names'''
1605 1603
1606 1604 names = set()
1607 1605 # since we previously only listed open branches, we will handle that
1608 1606 # specially (after this for loop)
1609 1607 for name, ns in repo.names.iteritems():
1610 1608 if name != 'branches':
1611 1609 names.update(ns.listnames(repo))
1612 1610 names.update(tag for (tag, heads, tip, closed)
1613 1611 in repo.branchmap().iterbranches() if not closed)
1614 1612 completions = set()
1615 1613 if not args:
1616 1614 args = ['']
1617 1615 for a in args:
1618 1616 completions.update(n for n in names if n.startswith(a))
1619 1617 ui.write('\n'.join(sorted(completions)))
1620 1618 ui.write('\n')
1621 1619
1622 1620 @command('debugobsolete',
1623 1621 [('', 'flags', 0, _('markers flag')),
1624 1622 ('', 'record-parents', False,
1625 1623 _('record parent information for the precursor')),
1626 1624 ('r', 'rev', [], _('display markers relevant to REV')),
1627 1625 ('', 'exclusive', False, _('restrict display to markers only '
1628 1626 'relevant to REV')),
1629 1627 ('', 'index', False, _('display index of the marker')),
1630 1628 ('', 'delete', [], _('delete markers specified by indices')),
1631 1629 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1632 1630 _('[OBSOLETED [REPLACEMENT ...]]'))
1633 1631 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1634 1632 """create arbitrary obsolete marker
1635 1633
1636 1634 With no arguments, displays the list of obsolescence markers."""
1637 1635
1638 1636 opts = pycompat.byteskwargs(opts)
1639 1637
1640 1638 def parsenodeid(s):
1641 1639 try:
1642 1640 # We do not use revsingle/revrange functions here to accept
1643 1641 # arbitrary node identifiers, possibly not present in the
1644 1642 # local repository.
1645 1643 n = bin(s)
1646 1644 if len(n) != len(nullid):
1647 1645 raise TypeError()
1648 1646 return n
1649 1647 except TypeError:
1650 1648 raise error.Abort('changeset references must be full hexadecimal '
1651 1649 'node identifiers')
1652 1650
1653 1651 if opts.get('delete'):
1654 1652 indices = []
1655 1653 for v in opts.get('delete'):
1656 1654 try:
1657 1655 indices.append(int(v))
1658 1656 except ValueError:
1659 1657 raise error.Abort(_('invalid index value: %r') % v,
1660 1658 hint=_('use integers for indices'))
1661 1659
1662 1660 if repo.currenttransaction():
1663 1661 raise error.Abort(_('cannot delete obsmarkers in the middle '
1664 1662 'of transaction.'))
1665 1663
1666 1664 with repo.lock():
1667 1665 n = repair.deleteobsmarkers(repo.obsstore, indices)
1668 1666 ui.write(_('deleted %i obsolescence markers\n') % n)
1669 1667
1670 1668 return
1671 1669
1672 1670 if precursor is not None:
1673 1671 if opts['rev']:
1674 1672 raise error.Abort('cannot select revision when creating marker')
1675 1673 metadata = {}
1676 1674 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1677 1675 succs = tuple(parsenodeid(succ) for succ in successors)
1678 1676 l = repo.lock()
1679 1677 try:
1680 1678 tr = repo.transaction('debugobsolete')
1681 1679 try:
1682 1680 date = opts.get('date')
1683 1681 if date:
1684 1682 date = dateutil.parsedate(date)
1685 1683 else:
1686 1684 date = None
1687 1685 prec = parsenodeid(precursor)
1688 1686 parents = None
1689 1687 if opts['record_parents']:
1690 1688 if prec not in repo.unfiltered():
1691 1689                         raise error.Abort('cannot use --record-parents on '
1692 1690 'unknown changesets')
1693 1691 parents = repo.unfiltered()[prec].parents()
1694 1692 parents = tuple(p.node() for p in parents)
1695 1693 repo.obsstore.create(tr, prec, succs, opts['flags'],
1696 1694 parents=parents, date=date,
1697 1695 metadata=metadata, ui=ui)
1698 1696 tr.close()
1699 1697 except ValueError as exc:
1700 1698 raise error.Abort(_('bad obsmarker input: %s') %
1701 1699 pycompat.bytestr(exc))
1702 1700 finally:
1703 1701 tr.release()
1704 1702 finally:
1705 1703 l.release()
1706 1704 else:
1707 1705 if opts['rev']:
1708 1706 revs = scmutil.revrange(repo, opts['rev'])
1709 1707 nodes = [repo[r].node() for r in revs]
1710 1708 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1711 1709 exclusive=opts['exclusive']))
1712 1710 markers.sort(key=lambda x: x._data)
1713 1711 else:
1714 1712 markers = obsutil.getmarkers(repo)
1715 1713
1716 1714 markerstoiter = markers
1717 1715 isrelevant = lambda m: True
1718 1716 if opts.get('rev') and opts.get('index'):
1719 1717 markerstoiter = obsutil.getmarkers(repo)
1720 1718 markerset = set(markers)
1721 1719 isrelevant = lambda m: m in markerset
1722 1720
1723 1721 fm = ui.formatter('debugobsolete', opts)
1724 1722 for i, m in enumerate(markerstoiter):
1725 1723 if not isrelevant(m):
1726 1724 # marker can be irrelevant when we're iterating over a set
1727 1725 # of markers (markerstoiter) which is bigger than the set
1728 1726 # of markers we want to display (markers)
1729 1727 # this can happen if both --index and --rev options are
1730 1728 # provided and thus we need to iterate over all of the markers
1731 1729 # to get the correct indices, but only display the ones that
1732 1730 # are relevant to --rev value
1733 1731 continue
1734 1732 fm.startitem()
1735 1733 ind = i if opts.get('index') else None
1736 1734 cmdutil.showmarker(fm, m, index=ind)
1737 1735 fm.end()
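    # Illustrative usage (hashes are hypothetical; markers are created from
    # full 40-character node identifiers):
    #   $ hg debugobsolete                        # list every marker
    #   $ hg debugobsolete --index -r tip         # markers relevant to tip
    #   $ hg debugobsolete --delete 0             # remove the marker at index 0
    #   $ hg debugobsolete -d '0 0' <old-full-hash> <new-full-hash>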
1738 1736
1739 1737 @command('debugpathcomplete',
1740 1738 [('f', 'full', None, _('complete an entire path')),
1741 1739 ('n', 'normal', None, _('show only normal files')),
1742 1740 ('a', 'added', None, _('show only added files')),
1743 1741 ('r', 'removed', None, _('show only removed files'))],
1744 1742 _('FILESPEC...'))
1745 1743 def debugpathcomplete(ui, repo, *specs, **opts):
1746 1744 '''complete part or all of a tracked path
1747 1745
1748 1746 This command supports shells that offer path name completion. It
1749 1747 currently completes only files already known to the dirstate.
1750 1748
1751 1749 Completion extends only to the next path segment unless
1752 1750 --full is specified, in which case entire paths are used.'''
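    # Illustrative behaviour, assuming the dirstate tracks 'src/util/helpers.py':
    #   $ hg debugpathcomplete src/          ->  src/util
    #   $ hg debugpathcomplete --full src/   ->  src/util/helpers.py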
1753 1751
1754 1752 def complete(path, acceptable):
1755 1753 dirstate = repo.dirstate
1756 1754 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1757 1755 rootdir = repo.root + pycompat.ossep
1758 1756 if spec != repo.root and not spec.startswith(rootdir):
1759 1757 return [], []
1760 1758 if os.path.isdir(spec):
1761 1759 spec += '/'
1762 1760 spec = spec[len(rootdir):]
1763 1761 fixpaths = pycompat.ossep != '/'
1764 1762 if fixpaths:
1765 1763 spec = spec.replace(pycompat.ossep, '/')
1766 1764 speclen = len(spec)
1767 1765 fullpaths = opts[r'full']
1768 1766 files, dirs = set(), set()
1769 1767 adddir, addfile = dirs.add, files.add
1770 1768 for f, st in dirstate.iteritems():
1771 1769 if f.startswith(spec) and st[0] in acceptable:
1772 1770 if fixpaths:
1773 1771 f = f.replace('/', pycompat.ossep)
1774 1772 if fullpaths:
1775 1773 addfile(f)
1776 1774 continue
1777 1775 s = f.find(pycompat.ossep, speclen)
1778 1776 if s >= 0:
1779 1777 adddir(f[:s])
1780 1778 else:
1781 1779 addfile(f)
1782 1780 return files, dirs
1783 1781
1784 1782 acceptable = ''
1785 1783 if opts[r'normal']:
1786 1784 acceptable += 'nm'
1787 1785 if opts[r'added']:
1788 1786 acceptable += 'a'
1789 1787 if opts[r'removed']:
1790 1788 acceptable += 'r'
1791 1789 cwd = repo.getcwd()
1792 1790 if not specs:
1793 1791 specs = ['.']
1794 1792
1795 1793 files, dirs = set(), set()
1796 1794 for spec in specs:
1797 1795 f, d = complete(spec, acceptable or 'nmar')
1798 1796 files.update(f)
1799 1797 dirs.update(d)
1800 1798 files.update(dirs)
1801 1799 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1802 1800 ui.write('\n')
1803 1801
1804 1802 @command('debugpeer', [], _('PATH'), norepo=True)
1805 1803 def debugpeer(ui, path):
1806 1804 """establish a connection to a peer repository"""
1807 1805 # Always enable peer request logging. Requires --debug to display
1808 1806 # though.
1809 1807 overrides = {
1810 1808 ('devel', 'debug.peer-request'): True,
1811 1809 }
1812 1810
1813 1811 with ui.configoverride(overrides):
1814 1812 peer = hg.peer(ui, {}, path)
1815 1813
1816 1814 local = peer.local() is not None
1817 1815 canpush = peer.canpush()
1818 1816
1819 1817 ui.write(_('url: %s\n') % peer.url())
1820 1818 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1821 1819 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
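    # Illustrative run against a hypothetical remote:
    #   $ hg debugpeer ssh://user@example.com/repo
    #   url: ssh://user@example.com/repo
    #   local: no
    #   pushable: yes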
1822 1820
1823 1821 @command('debugpickmergetool',
1824 1822 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1825 1823 ('', 'changedelete', None, _('emulate merging change and delete')),
1826 1824 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1827 1825 _('[PATTERN]...'),
1828 1826 inferrepo=True)
1829 1827 def debugpickmergetool(ui, repo, *pats, **opts):
1830 1828 """examine which merge tool is chosen for specified file
1831 1829
1832 1830 As described in :hg:`help merge-tools`, Mercurial examines
1833 1831 configurations below in this order to decide which merge tool is
1834 1832 chosen for specified file.
1835 1833
1836 1834 1. ``--tool`` option
1837 1835 2. ``HGMERGE`` environment variable
1838 1836 3. configurations in ``merge-patterns`` section
1839 1837 4. configuration of ``ui.merge``
1840 1838 5. configurations in ``merge-tools`` section
1841 1839     6. ``hgmerge`` tool (for historical reasons only)
1842 1840 7. default tool for fallback (``:merge`` or ``:prompt``)
1843 1841
1844 1842 This command writes out examination result in the style below::
1845 1843
1846 1844 FILE = MERGETOOL
1847 1845
1848 1846 By default, all files known in the first parent context of the
1849 1847 working directory are examined. Use file patterns and/or -I/-X
1850 1848 options to limit target files. -r/--rev is also useful to examine
1851 1849     files in another context without actually updating to it.
1852 1850
1853 1851     With --debug, this command also shows the warning messages produced
1854 1852     while matching against ``merge-patterns`` and so on. It is recommended
1855 1853     to use this option with explicit file patterns and/or -I/-X options,
1856 1854     because this option increases the amount of output per file according
1857 1855     to the configurations in hgrc.
1858 1856
1859 1857     With -v/--verbose, this command first shows the configurations
1860 1858     below (only if they are specified).
1861 1859
1862 1860 - ``--tool`` option
1863 1861 - ``HGMERGE`` environment variable
1864 1862 - configuration of ``ui.merge``
1865 1863
1866 1864     If a merge tool is chosen before matching against
1867 1865     ``merge-patterns``, this command can't show any helpful
1868 1866     information, even with --debug. In such a case, the information
1869 1867     above is useful for understanding why a merge tool was chosen.
1870 1868 """
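    # Illustrative example with a hypothetical hgrc containing:
    #   [merge-patterns]
    #   **.c = kdiff3
    # running 'hg debugpickmergetool foo.c' would report (assuming kdiff3 is
    # available):
    #   foo.c = kdiff3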
1871 1869 opts = pycompat.byteskwargs(opts)
1872 1870 overrides = {}
1873 1871 if opts['tool']:
1874 1872 overrides[('ui', 'forcemerge')] = opts['tool']
1875 1873 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1876 1874
1877 1875 with ui.configoverride(overrides, 'debugmergepatterns'):
1878 1876 hgmerge = encoding.environ.get("HGMERGE")
1879 1877 if hgmerge is not None:
1880 1878 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1881 1879 uimerge = ui.config("ui", "merge")
1882 1880 if uimerge:
1883 1881 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1884 1882
1885 1883 ctx = scmutil.revsingle(repo, opts.get('rev'))
1886 1884 m = scmutil.match(ctx, pats, opts)
1887 1885 changedelete = opts['changedelete']
1888 1886 for path in ctx.walk(m):
1889 1887 fctx = ctx[path]
1890 1888 try:
1891 1889 if not ui.debugflag:
1892 1890 ui.pushbuffer(error=True)
1893 1891 tool, toolpath = filemerge._picktool(repo, ui, path,
1894 1892 fctx.isbinary(),
1895 1893 'l' in fctx.flags(),
1896 1894 changedelete)
1897 1895 finally:
1898 1896 if not ui.debugflag:
1899 1897 ui.popbuffer()
1900 1898 ui.write(('%s = %s\n') % (path, tool))
1901 1899
1902 1900 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1903 1901 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1904 1902 '''access the pushkey key/value protocol
1905 1903
1906 1904 With two args, list the keys in the given namespace.
1907 1905
1908 1906 With five args, set a key to new if it currently is set to old.
1909 1907 Reports success or failure.
1910 1908 '''
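    # Illustrative usage (the repository URL is hypothetical); the
    # two-argument form lists each bookmark and the node it points to,
    # one per line:
    #   $ hg debugpushkey https://example.com/repo bookmarks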
1911 1909
1912 1910 target = hg.peer(ui, {}, repopath)
1913 1911 if keyinfo:
1914 1912 key, old, new = keyinfo
1915 1913 with target.commandexecutor() as e:
1916 1914 r = e.callcommand('pushkey', {
1917 1915 'namespace': namespace,
1918 1916 'key': key,
1919 1917 'old': old,
1920 1918 'new': new,
1921 1919 }).result()
1922 1920
1923 1921 ui.status(pycompat.bytestr(r) + '\n')
1924 1922 return not r
1925 1923 else:
1926 1924 for k, v in sorted(target.listkeys(namespace).iteritems()):
1927 1925 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1928 1926 stringutil.escapestr(v)))
1929 1927
1930 1928 @command('debugpvec', [], _('A B'))
1931 1929 def debugpvec(ui, repo, a, b=None):
1932 1930 ca = scmutil.revsingle(repo, a)
1933 1931 cb = scmutil.revsingle(repo, b)
1934 1932 pa = pvec.ctxpvec(ca)
1935 1933 pb = pvec.ctxpvec(cb)
1936 1934 if pa == pb:
1937 1935 rel = "="
1938 1936 elif pa > pb:
1939 1937 rel = ">"
1940 1938 elif pa < pb:
1941 1939 rel = "<"
1942 1940 elif pa | pb:
1943 1941 rel = "|"
1944 1942 ui.write(_("a: %s\n") % pa)
1945 1943 ui.write(_("b: %s\n") % pb)
1946 1944 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1947 1945 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1948 1946 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1949 1947 pa.distance(pb), rel))
1950 1948
1951 1949 @command('debugrebuilddirstate|debugrebuildstate',
1952 1950 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1953 1951 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1954 1952 'the working copy parent')),
1955 1953 ],
1956 1954 _('[-r REV]'))
1957 1955 def debugrebuilddirstate(ui, repo, rev, **opts):
1958 1956 """rebuild the dirstate as it would look like for the given revision
1959 1957
1960 1958     If no revision is specified, the first parent of the working directory is used.
1961 1959
1962 1960 The dirstate will be set to the files of the given revision.
1963 1961 The actual working directory content or existing dirstate
1964 1962 information such as adds or removes is not considered.
1965 1963
1966 1964 ``minimal`` will only rebuild the dirstate status for files that claim to be
1967 1965 tracked but are not in the parent manifest, or that exist in the parent
1968 1966 manifest but are not in the dirstate. It will not change adds, removes, or
1969 1967 modified files that are in the working copy parent.
1970 1968
1971 1969 One use of this command is to make the next :hg:`status` invocation
1972 1970 check the actual file content.
1973 1971 """
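    # Illustrative usage:
    #   $ hg debugrebuilddirstate               # rebuild from the working parent
    #   $ hg debugrebuilddirstate --minimal     # only repair inconsistent entries
    #   $ hg debugrebuilddirstate -r '.^'       # rebuild against another revision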
1974 1972 ctx = scmutil.revsingle(repo, rev)
1975 1973 with repo.wlock():
1976 1974 dirstate = repo.dirstate
1977 1975 changedfiles = None
1978 1976 # See command doc for what minimal does.
1979 1977 if opts.get(r'minimal'):
1980 1978 manifestfiles = set(ctx.manifest().keys())
1981 1979 dirstatefiles = set(dirstate)
1982 1980 manifestonly = manifestfiles - dirstatefiles
1983 1981 dsonly = dirstatefiles - manifestfiles
1984 1982 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1985 1983 changedfiles = manifestonly | dsnotadded
1986 1984
1987 1985 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1988 1986
1989 1987 @command('debugrebuildfncache', [], '')
1990 1988 def debugrebuildfncache(ui, repo):
1991 1989 """rebuild the fncache file"""
1992 1990 repair.rebuildfncache(ui, repo)
1993 1991
1994 1992 @command('debugrename',
1995 1993 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1996 1994 _('[-r REV] FILE'))
1997 1995 def debugrename(ui, repo, file1, *pats, **opts):
1998 1996 """dump rename information"""
1999 1997
2000 1998 opts = pycompat.byteskwargs(opts)
2001 1999 ctx = scmutil.revsingle(repo, opts.get('rev'))
2002 2000 m = scmutil.match(ctx, (file1,) + pats, opts)
2003 2001 for abs in ctx.walk(m):
2004 2002 fctx = ctx[abs]
2005 2003 o = fctx.filelog().renamed(fctx.filenode())
2006 2004 rel = m.rel(abs)
2007 2005 if o:
2008 2006 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2009 2007 else:
2010 2008 ui.write(_("%s not renamed\n") % rel)
2011 2009
2012 2010 @command('debugrevlog', cmdutil.debugrevlogopts +
2013 2011 [('d', 'dump', False, _('dump index data'))],
2014 2012 _('-c|-m|FILE'),
2015 2013 optionalrepo=True)
2016 2014 def debugrevlog(ui, repo, file_=None, **opts):
2017 2015 """show data and statistics about a revlog"""
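    # Illustrative usage (-c/-m select the changelog/manifest revlog):
    #   $ hg debugrevlog -m           # statistics for the manifest
    #   $ hg debugrevlog -c --dump    # raw per-revision table for the changelog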
2018 2016 opts = pycompat.byteskwargs(opts)
2019 2017 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2020 2018
2021 2019 if opts.get("dump"):
2022 2020 numrevs = len(r)
2023 2021 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2024 2022 " rawsize totalsize compression heads chainlen\n"))
2025 2023 ts = 0
2026 2024 heads = set()
2027 2025
2028 2026 for rev in pycompat.xrange(numrevs):
2029 2027 dbase = r.deltaparent(rev)
2030 2028 if dbase == -1:
2031 2029 dbase = rev
2032 2030 cbase = r.chainbase(rev)
2033 2031 clen = r.chainlen(rev)
2034 2032 p1, p2 = r.parentrevs(rev)
2035 2033 rs = r.rawsize(rev)
2036 2034 ts = ts + rs
2037 2035 heads -= set(r.parentrevs(rev))
2038 2036 heads.add(rev)
2039 2037 try:
2040 2038 compression = ts / r.end(rev)
2041 2039 except ZeroDivisionError:
2042 2040 compression = 0
2043 2041 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2044 2042 "%11d %5d %8d\n" %
2045 2043 (rev, p1, p2, r.start(rev), r.end(rev),
2046 2044 r.start(dbase), r.start(cbase),
2047 2045 r.start(p1), r.start(p2),
2048 2046 rs, ts, compression, len(heads), clen))
2049 2047 return 0
2050 2048
2051 2049 v = r.version
2052 2050 format = v & 0xFFFF
2053 2051 flags = []
2054 2052 gdelta = False
2055 2053 if v & revlog.FLAG_INLINE_DATA:
2056 2054 flags.append('inline')
2057 2055 if v & revlog.FLAG_GENERALDELTA:
2058 2056 gdelta = True
2059 2057 flags.append('generaldelta')
2060 2058 if not flags:
2061 2059 flags = ['(none)']
2062 2060
2063 2061 ### tracks merge vs single parent
2064 2062 nummerges = 0
2065 2063
2066 2064     ### tracks the ways the "delta" are built
2067 2065 # nodelta
2068 2066 numempty = 0
2069 2067 numemptytext = 0
2070 2068 numemptydelta = 0
2071 2069 # full file content
2072 2070 numfull = 0
2073 2071 # intermediate snapshot against a prior snapshot
2074 2072 numsemi = 0
2075 2073 # snapshot count per depth
2076 2074 numsnapdepth = collections.defaultdict(lambda: 0)
2077 2075 # delta against previous revision
2078 2076 numprev = 0
2079 2077 # delta against first or second parent (not prev)
2080 2078 nump1 = 0
2081 2079 nump2 = 0
2082 2080 # delta against neither prev nor parents
2083 2081 numother = 0
2084 2082 # delta against prev that are also first or second parent
2085 2083 # (details of `numprev`)
2086 2084 nump1prev = 0
2087 2085 nump2prev = 0
2088 2086
2089 2087 # data about delta chain of each revs
2090 2088 chainlengths = []
2091 2089 chainbases = []
2092 2090 chainspans = []
2093 2091
2094 2092 # data about each revision
2095 2093 datasize = [None, 0, 0]
2096 2094 fullsize = [None, 0, 0]
2097 2095 semisize = [None, 0, 0]
2098 2096 # snapshot count per depth
2099 2097 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2100 2098 deltasize = [None, 0, 0]
2101 2099 chunktypecounts = {}
2102 2100 chunktypesizes = {}
2103 2101
2104 2102 def addsize(size, l):
2105 2103 if l[0] is None or size < l[0]:
2106 2104 l[0] = size
2107 2105 if size > l[1]:
2108 2106 l[1] = size
2109 2107 l[2] += size
2110 2108
2111 2109 numrevs = len(r)
2112 2110 for rev in pycompat.xrange(numrevs):
2113 2111 p1, p2 = r.parentrevs(rev)
2114 2112 delta = r.deltaparent(rev)
2115 2113 if format > 0:
2116 2114 addsize(r.rawsize(rev), datasize)
2117 2115 if p2 != nullrev:
2118 2116 nummerges += 1
2119 2117 size = r.length(rev)
2120 2118 if delta == nullrev:
2121 2119 chainlengths.append(0)
2122 2120 chainbases.append(r.start(rev))
2123 2121 chainspans.append(size)
2124 2122 if size == 0:
2125 2123 numempty += 1
2126 2124 numemptytext += 1
2127 2125 else:
2128 2126 numfull += 1
2129 2127 numsnapdepth[0] += 1
2130 2128 addsize(size, fullsize)
2131 2129 addsize(size, snapsizedepth[0])
2132 2130 else:
2133 2131 chainlengths.append(chainlengths[delta] + 1)
2134 2132 baseaddr = chainbases[delta]
2135 2133 revaddr = r.start(rev)
2136 2134 chainbases.append(baseaddr)
2137 2135 chainspans.append((revaddr - baseaddr) + size)
2138 2136 if size == 0:
2139 2137 numempty += 1
2140 2138 numemptydelta += 1
2141 2139 elif r.issnapshot(rev):
2142 2140 addsize(size, semisize)
2143 2141 numsemi += 1
2144 2142 depth = r.snapshotdepth(rev)
2145 2143 numsnapdepth[depth] += 1
2146 2144 addsize(size, snapsizedepth[depth])
2147 2145 else:
2148 2146 addsize(size, deltasize)
2149 2147 if delta == rev - 1:
2150 2148 numprev += 1
2151 2149 if delta == p1:
2152 2150 nump1prev += 1
2153 2151 elif delta == p2:
2154 2152 nump2prev += 1
2155 2153 elif delta == p1:
2156 2154 nump1 += 1
2157 2155 elif delta == p2:
2158 2156 nump2 += 1
2159 2157 elif delta != nullrev:
2160 2158 numother += 1
2161 2159
2162 2160 # Obtain data on the raw chunks in the revlog.
2163 2161 if util.safehasattr(r, '_getsegmentforrevs'):
2164 2162 segment = r._getsegmentforrevs(rev, rev)[1]
2165 2163 else:
2166 2164 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2167 2165 if segment:
2168 2166 chunktype = bytes(segment[0:1])
2169 2167 else:
2170 2168 chunktype = 'empty'
2171 2169
2172 2170 if chunktype not in chunktypecounts:
2173 2171 chunktypecounts[chunktype] = 0
2174 2172 chunktypesizes[chunktype] = 0
2175 2173
2176 2174 chunktypecounts[chunktype] += 1
2177 2175 chunktypesizes[chunktype] += size
2178 2176
2179 2177 # Adjust size min value for empty cases
2180 2178 for size in (datasize, fullsize, semisize, deltasize):
2181 2179 if size[0] is None:
2182 2180 size[0] = 0
2183 2181
2184 2182 numdeltas = numrevs - numfull - numempty - numsemi
2185 2183 numoprev = numprev - nump1prev - nump2prev
2186 2184 totalrawsize = datasize[2]
2187 2185 datasize[2] /= numrevs
2188 2186 fulltotal = fullsize[2]
2189 2187 fullsize[2] /= numfull
2190 2188 semitotal = semisize[2]
2191 2189 snaptotal = {}
2192 2190 if 0 < numsemi:
2193 2191 semisize[2] /= numsemi
2194 2192 for depth in snapsizedepth:
2195 2193 snaptotal[depth] = snapsizedepth[depth][2]
2196 2194 snapsizedepth[depth][2] /= numsnapdepth[depth]
2197 2195
2198 2196 deltatotal = deltasize[2]
2199 2197 if numdeltas > 0:
2200 2198 deltasize[2] /= numdeltas
2201 2199 totalsize = fulltotal + semitotal + deltatotal
2202 2200 avgchainlen = sum(chainlengths) / numrevs
2203 2201 maxchainlen = max(chainlengths)
2204 2202 maxchainspan = max(chainspans)
2205 2203 compratio = 1
2206 2204 if totalsize:
2207 2205 compratio = totalrawsize / totalsize
2208 2206
2209 2207 basedfmtstr = '%%%dd\n'
2210 2208 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2211 2209
2212 2210 def dfmtstr(max):
2213 2211 return basedfmtstr % len(str(max))
2214 2212 def pcfmtstr(max, padding=0):
2215 2213 return basepcfmtstr % (len(str(max)), ' ' * padding)
2216 2214
2217 2215 def pcfmt(value, total):
2218 2216 if total:
2219 2217 return (value, 100 * float(value) / total)
2220 2218 else:
2221 2219 return value, 100.0
2222 2220
2223 2221 ui.write(('format : %d\n') % format)
2224 2222 ui.write(('flags : %s\n') % ', '.join(flags))
2225 2223
2226 2224 ui.write('\n')
2227 2225 fmt = pcfmtstr(totalsize)
2228 2226 fmt2 = dfmtstr(totalsize)
2229 2227 ui.write(('revisions : ') + fmt2 % numrevs)
2230 2228 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2231 2229 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2232 2230 ui.write(('revisions : ') + fmt2 % numrevs)
2233 2231 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2234 2232 ui.write((' text : ')
2235 2233 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2236 2234 ui.write((' delta : ')
2237 2235 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2238 2236 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2239 2237 for depth in sorted(numsnapdepth):
2240 2238 ui.write((' lvl-%-3d : ' % depth)
2241 2239 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2242 2240 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2243 2241 ui.write(('revision size : ') + fmt2 % totalsize)
2244 2242 ui.write((' snapshot : ')
2245 2243 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2246 2244 for depth in sorted(numsnapdepth):
2247 2245 ui.write((' lvl-%-3d : ' % depth)
2248 2246 + fmt % pcfmt(snaptotal[depth], totalsize))
2249 2247 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2250 2248
2251 2249 def fmtchunktype(chunktype):
2252 2250 if chunktype == 'empty':
2253 2251 return ' %s : ' % chunktype
2254 2252 elif chunktype in pycompat.bytestr(string.ascii_letters):
2255 2253 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2256 2254 else:
2257 2255 return ' 0x%s : ' % hex(chunktype)
2258 2256
2259 2257 ui.write('\n')
2260 2258 ui.write(('chunks : ') + fmt2 % numrevs)
2261 2259 for chunktype in sorted(chunktypecounts):
2262 2260 ui.write(fmtchunktype(chunktype))
2263 2261 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2264 2262 ui.write(('chunks size : ') + fmt2 % totalsize)
2265 2263 for chunktype in sorted(chunktypecounts):
2266 2264 ui.write(fmtchunktype(chunktype))
2267 2265 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2268 2266
2269 2267 ui.write('\n')
2270 2268 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2271 2269 ui.write(('avg chain length : ') + fmt % avgchainlen)
2272 2270 ui.write(('max chain length : ') + fmt % maxchainlen)
2273 2271 ui.write(('max chain reach : ') + fmt % maxchainspan)
2274 2272 ui.write(('compression ratio : ') + fmt % compratio)
2275 2273
2276 2274 if format > 0:
2277 2275 ui.write('\n')
2278 2276 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2279 2277 % tuple(datasize))
2280 2278 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2281 2279 % tuple(fullsize))
2282 2280 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2283 2281 % tuple(semisize))
2284 2282 for depth in sorted(snapsizedepth):
2285 2283 if depth == 0:
2286 2284 continue
2287 2285 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2288 2286 % ((depth,) + tuple(snapsizedepth[depth])))
2289 2287 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2290 2288 % tuple(deltasize))
2291 2289
2292 2290 if numdeltas > 0:
2293 2291 ui.write('\n')
2294 2292 fmt = pcfmtstr(numdeltas)
2295 2293 fmt2 = pcfmtstr(numdeltas, 4)
2296 2294 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2297 2295 if numprev > 0:
2298 2296 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2299 2297 numprev))
2300 2298 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2301 2299 numprev))
2302 2300 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2303 2301 numprev))
2304 2302 if gdelta:
2305 2303 ui.write(('deltas against p1 : ')
2306 2304 + fmt % pcfmt(nump1, numdeltas))
2307 2305 ui.write(('deltas against p2 : ')
2308 2306 + fmt % pcfmt(nump2, numdeltas))
2309 2307 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2310 2308 numdeltas))
2311 2309
2312 2310 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2313 2311 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2314 2312 _('[-f FORMAT] -c|-m|FILE'),
2315 2313 optionalrepo=True)
2316 2314 def debugrevlogindex(ui, repo, file_=None, **opts):
2317 2315 """dump the contents of a revlog index"""
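    # Illustrative usage:
    #   $ hg debugrevlogindex -c          # default (format 0) index listing
    #   $ hg debugrevlogindex -m -f 1     # alternate format 1 for the manifest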
2318 2316 opts = pycompat.byteskwargs(opts)
2319 2317 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2320 2318 format = opts.get('format', 0)
2321 2319 if format not in (0, 1):
2322 2320 raise error.Abort(_("unknown format %d") % format)
2323 2321
2324 2322 if ui.debugflag:
2325 2323 shortfn = hex
2326 2324 else:
2327 2325 shortfn = short
2328 2326
2329 2327 # There might not be anything in r, so have a sane default
2330 2328 idlen = 12
2331 2329 for i in r:
2332 2330 idlen = len(shortfn(r.node(i)))
2333 2331 break
2334 2332
2335 2333 if format == 0:
2336 2334 if ui.verbose:
2337 2335 ui.write((" rev offset length linkrev"
2338 2336 " %s %s p2\n") % ("nodeid".ljust(idlen),
2339 2337 "p1".ljust(idlen)))
2340 2338 else:
2341 2339 ui.write((" rev linkrev %s %s p2\n") % (
2342 2340 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2343 2341 elif format == 1:
2344 2342 if ui.verbose:
2345 2343 ui.write((" rev flag offset length size link p1"
2346 2344 " p2 %s\n") % "nodeid".rjust(idlen))
2347 2345 else:
2348 2346 ui.write((" rev flag size link p1 p2 %s\n") %
2349 2347 "nodeid".rjust(idlen))
2350 2348
2351 2349 for i in r:
2352 2350 node = r.node(i)
2353 2351 if format == 0:
2354 2352 try:
2355 2353 pp = r.parents(node)
2356 2354 except Exception:
2357 2355 pp = [nullid, nullid]
2358 2356 if ui.verbose:
2359 2357 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2360 2358 i, r.start(i), r.length(i), r.linkrev(i),
2361 2359 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2362 2360 else:
2363 2361 ui.write("% 6d % 7d %s %s %s\n" % (
2364 2362 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2365 2363 shortfn(pp[1])))
2366 2364 elif format == 1:
2367 2365 pr = r.parentrevs(i)
2368 2366 if ui.verbose:
2369 2367 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2370 2368 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2371 2369 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2372 2370 else:
2373 2371 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2374 2372 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2375 2373 shortfn(node)))
2376 2374
2377 2375 @command('debugrevspec',
2378 2376 [('', 'optimize', None,
2379 2377 _('print parsed tree after optimizing (DEPRECATED)')),
2380 2378 ('', 'show-revs', True, _('print list of result revisions (default)')),
2381 2379 ('s', 'show-set', None, _('print internal representation of result set')),
2382 2380 ('p', 'show-stage', [],
2383 2381 _('print parsed tree at the given stage'), _('NAME')),
2384 2382 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2385 2383 ('', 'verify-optimized', False, _('verify optimized result')),
2386 2384 ],
2387 2385 ('REVSPEC'))
2388 2386 def debugrevspec(ui, repo, expr, **opts):
2389 2387 """parse and apply a revision specification
2390 2388
2391 2389     Use the -p/--show-stage option to print the parsed tree at the given stages.
2392 2390     Use -p all to print the tree at every stage.
2393 2391
2394 2392     Use the --no-show-revs option with -s or -p to print only the set
2395 2393     representation or the parsed tree, respectively.
2396 2394
2397 2395 Use --verify-optimized to compare the optimized result with the unoptimized
2398 2396 one. Returns 1 if the optimized result differs.
2399 2397 """
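    # Illustrative usage:
    #   $ hg debugrevspec -p all 'heads(all())'
    # prints the tree after each stage (parsed, expanded, concatenated,
    # analyzed, optimized) and then the matching revision numbers.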
2400 2398 opts = pycompat.byteskwargs(opts)
2401 2399 aliases = ui.configitems('revsetalias')
2402 2400 stages = [
2403 2401 ('parsed', lambda tree: tree),
2404 2402 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2405 2403 ui.warn)),
2406 2404 ('concatenated', revsetlang.foldconcat),
2407 2405 ('analyzed', revsetlang.analyze),
2408 2406 ('optimized', revsetlang.optimize),
2409 2407 ]
2410 2408 if opts['no_optimized']:
2411 2409 stages = stages[:-1]
2412 2410 if opts['verify_optimized'] and opts['no_optimized']:
2413 2411 raise error.Abort(_('cannot use --verify-optimized with '
2414 2412 '--no-optimized'))
2415 2413 stagenames = set(n for n, f in stages)
2416 2414
2417 2415 showalways = set()
2418 2416 showchanged = set()
2419 2417 if ui.verbose and not opts['show_stage']:
2420 2418 # show parsed tree by --verbose (deprecated)
2421 2419 showalways.add('parsed')
2422 2420 showchanged.update(['expanded', 'concatenated'])
2423 2421 if opts['optimize']:
2424 2422 showalways.add('optimized')
2425 2423 if opts['show_stage'] and opts['optimize']:
2426 2424 raise error.Abort(_('cannot use --optimize with --show-stage'))
2427 2425 if opts['show_stage'] == ['all']:
2428 2426 showalways.update(stagenames)
2429 2427 else:
2430 2428 for n in opts['show_stage']:
2431 2429 if n not in stagenames:
2432 2430 raise error.Abort(_('invalid stage name: %s') % n)
2433 2431 showalways.update(opts['show_stage'])
2434 2432
2435 2433 treebystage = {}
2436 2434 printedtree = None
2437 2435 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2438 2436 for n, f in stages:
2439 2437 treebystage[n] = tree = f(tree)
2440 2438 if n in showalways or (n in showchanged and tree != printedtree):
2441 2439 if opts['show_stage'] or n != 'parsed':
2442 2440 ui.write(("* %s:\n") % n)
2443 2441 ui.write(revsetlang.prettyformat(tree), "\n")
2444 2442 printedtree = tree
2445 2443
2446 2444 if opts['verify_optimized']:
2447 2445 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2448 2446 brevs = revset.makematcher(treebystage['optimized'])(repo)
2449 2447 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2450 2448 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2451 2449 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2452 2450 arevs = list(arevs)
2453 2451 brevs = list(brevs)
2454 2452 if arevs == brevs:
2455 2453 return 0
2456 2454 ui.write(('--- analyzed\n'), label='diff.file_a')
2457 2455 ui.write(('+++ optimized\n'), label='diff.file_b')
2458 2456 sm = difflib.SequenceMatcher(None, arevs, brevs)
2459 2457 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2460 2458 if tag in ('delete', 'replace'):
2461 2459 for c in arevs[alo:ahi]:
2462 2460 ui.write('-%s\n' % c, label='diff.deleted')
2463 2461 if tag in ('insert', 'replace'):
2464 2462 for c in brevs[blo:bhi]:
2465 2463 ui.write('+%s\n' % c, label='diff.inserted')
2466 2464 if tag == 'equal':
2467 2465 for c in arevs[alo:ahi]:
2468 2466 ui.write(' %s\n' % c)
2469 2467 return 1
2470 2468
2471 2469 func = revset.makematcher(tree)
2472 2470 revs = func(repo)
2473 2471 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2474 2472 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2475 2473 if not opts['show_revs']:
2476 2474 return
2477 2475 for c in revs:
2478 2476 ui.write("%d\n" % c)
2479 2477
2480 2478 @command('debugserve', [
2481 2479 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2482 2480 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2483 2481 ('', 'logiofile', '', _('file to log server I/O to')),
2484 2482 ], '')
2485 2483 def debugserve(ui, repo, **opts):
2486 2484 """run a server with advanced settings
2487 2485
2488 2486 This command is similar to :hg:`serve`. It exists partially as a
2489 2487 workaround to the fact that ``hg serve --stdio`` must have specific
2490 2488     workaround for the fact that ``hg serve --stdio`` must have specific
2491 2489 """
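    # Illustrative usage (the repository and log paths are hypothetical):
    #   $ hg -R /path/to/repo debugserve --sshstdio --logiofile /tmp/hg-io.log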
2492 2490 opts = pycompat.byteskwargs(opts)
2493 2491
2494 2492 if not opts['sshstdio']:
2495 2493 raise error.Abort(_('only --sshstdio is currently supported'))
2496 2494
2497 2495 logfh = None
2498 2496
2499 2497 if opts['logiofd'] and opts['logiofile']:
2500 2498 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2501 2499
2502 2500 if opts['logiofd']:
2503 2501 # Line buffered because output is line based.
2504 2502 try:
2505 2503 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2506 2504 except OSError as e:
2507 2505 if e.errno != errno.ESPIPE:
2508 2506 raise
2509 2507 # can't seek a pipe, so `ab` mode fails on py3
2510 2508 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2511 2509 elif opts['logiofile']:
2512 2510 logfh = open(opts['logiofile'], 'ab', 1)
2513 2511
2514 2512 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2515 2513 s.serve_forever()
2516 2514
2517 2515 @command('debugsetparents', [], _('REV1 [REV2]'))
2518 2516 def debugsetparents(ui, repo, rev1, rev2=None):
2519 2517 """manually set the parents of the current working directory
2520 2518
2521 2519 This is useful for writing repository conversion tools, but should
2522 2520 be used with care. For example, neither the working directory nor the
2523 2521 dirstate is updated, so file status may be incorrect after running this
2524 2522 command.
2525 2523
2526 2524 Returns 0 on success.
2527 2525 """
2528 2526
2529 2527 node1 = scmutil.revsingle(repo, rev1).node()
2530 2528 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2531 2529
2532 2530 with repo.wlock():
2533 2531 repo.setparents(node1, node2)
2534 2532
2535 2533 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2536 2534 def debugssl(ui, repo, source=None, **opts):
2537 2535 '''test a secure connection to a server
2538 2536
2539 2537 This builds the certificate chain for the server on Windows, installing the
2540 2538 missing intermediates and trusted root via Windows Update if necessary. It
2541 2539 does nothing on other platforms.
2542 2540
2543 2541 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2544 2542 that server is used. See :hg:`help urls` for more information.
2545 2543
2546 2544 If the update succeeds, retry the original operation. Otherwise, the cause
2547 2545 of the SSL error is likely another issue.
2548 2546 '''
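    # Illustrative usage (Windows only; the URL is hypothetical):
    #   $ hg debugssl https://hg.example.com/repo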
2549 2547 if not pycompat.iswindows:
2550 2548 raise error.Abort(_('certificate chain building is only possible on '
2551 2549 'Windows'))
2552 2550
2553 2551 if not source:
2554 2552 if not repo:
2555 2553 raise error.Abort(_("there is no Mercurial repository here, and no "
2556 2554 "server specified"))
2557 2555 source = "default"
2558 2556
2559 2557 source, branches = hg.parseurl(ui.expandpath(source))
2560 2558 url = util.url(source)
2561 2559 addr = None
2562 2560
2563 2561 defaultport = {'https': 443, 'ssh': 22}
2564 2562 if url.scheme in defaultport:
2565 2563 try:
2566 2564 addr = (url.host, int(url.port or defaultport[url.scheme]))
2567 2565 except ValueError:
2568 2566 raise error.Abort(_("malformed port number in URL"))
2569 2567 else:
2570 2568 raise error.Abort(_("only https and ssh connections are supported"))
2571 2569
2572 2570 from . import win32
2573 2571
2574 2572 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2575 2573 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2576 2574
2577 2575 try:
2578 2576 s.connect(addr)
2579 2577 cert = s.getpeercert(True)
2580 2578
2581 2579 ui.status(_('checking the certificate chain for %s\n') % url.host)
2582 2580
2583 2581 complete = win32.checkcertificatechain(cert, build=False)
2584 2582
2585 2583 if not complete:
2586 2584 ui.status(_('certificate chain is incomplete, updating... '))
2587 2585
2588 2586 if not win32.checkcertificatechain(cert):
2589 2587 ui.status(_('failed.\n'))
2590 2588 else:
2591 2589 ui.status(_('done.\n'))
2592 2590 else:
2593 2591 ui.status(_('full certificate chain is available\n'))
2594 2592 finally:
2595 2593 s.close()
2596 2594
2597 2595 @command('debugsub',
2598 2596 [('r', 'rev', '',
2599 2597 _('revision to check'), _('REV'))],
2600 2598 _('[-r REV] [REV]'))
2601 2599 def debugsub(ui, repo, rev=None):
2602 2600 ctx = scmutil.revsingle(repo, rev, None)
2603 2601 for k, v in sorted(ctx.substate.items()):
2604 2602 ui.write(('path %s\n') % k)
2605 2603 ui.write((' source %s\n') % v[0])
2606 2604 ui.write((' revision %s\n') % v[1])
2607 2605
2608 2606 @command('debugsuccessorssets',
2609 2607 [('', 'closest', False, _('return closest successors sets only'))],
2610 2608 _('[REV]'))
2611 2609 def debugsuccessorssets(ui, repo, *revs, **opts):
2612 2610 """show set of successors for revision
2613 2611
2614 2612 A successors set of changeset A is a consistent group of revisions that
2615 2613     succeed A. It contains non-obsolete changesets only unless the
2616 2614     --closest option is set.
2617 2615
2618 2616 In most cases a changeset A has a single successors set containing a single
2619 2617 successor (changeset A replaced by A').
2620 2618
2621 2619     A changeset that is made obsolete with no successors is called "pruned".
2622 2620 Such changesets have no successors sets at all.
2623 2621
2624 2622 A changeset that has been "split" will have a successors set containing
2625 2623 more than one successor.
2626 2624
2627 2625 A changeset that has been rewritten in multiple different ways is called
2628 2626 "divergent". Such changesets have multiple successor sets (each of which
2629 2627 may also be split, i.e. have multiple successors).
2630 2628
2631 2629 Results are displayed as follows::
2632 2630
2633 2631 <rev1>
2634 2632 <successors-1A>
2635 2633 <rev2>
2636 2634 <successors-2A>
2637 2635 <successors-2B1> <successors-2B2> <successors-2B3>
2638 2636
2639 2637 Here rev2 has two possible (i.e. divergent) successors sets. The first
2640 2638 holds one element, whereas the second holds three (i.e. the changeset has
2641 2639 been split).
2642 2640 """
2643 2641 # passed to successorssets caching computation from one call to another
2644 2642 cache = {}
2645 2643 ctx2str = bytes
2646 2644 node2str = short
2647 2645 for rev in scmutil.revrange(repo, revs):
2648 2646 ctx = repo[rev]
2649 2647         ui.write('%s\n' % ctx2str(ctx))
2650 2648 for succsset in obsutil.successorssets(repo, ctx.node(),
2651 2649 closest=opts[r'closest'],
2652 2650 cache=cache):
2653 2651 if succsset:
2654 2652 ui.write(' ')
2655 2653 ui.write(node2str(succsset[0]))
2656 2654 for node in succsset[1:]:
2657 2655 ui.write(' ')
2658 2656 ui.write(node2str(node))
2659 2657 ui.write('\n')
2660 2658
2661 2659 @command('debugtemplate',
2662 2660 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2663 2661 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2664 2662 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2665 2663 optionalrepo=True)
2666 2664 def debugtemplate(ui, repo, tmpl, **opts):
2667 2665 """parse and apply a template
2668 2666
2669 2667 If -r/--rev is given, the template is processed as a log template and
2670 2668 applied to the given changesets. Otherwise, it is processed as a generic
2671 2669 template.
2672 2670
2673 2671 Use --verbose to print the parsed tree.
2674 2672 """
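    # Illustrative usage:
    #   $ hg debugtemplate -r . -D greeting=hello '{greeting}: {rev}:{node|short}\n'
    # applies the template to the working directory parent with a custom
    # 'greeting' keyword defined on the command line.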
2675 2673 revs = None
2676 2674 if opts[r'rev']:
2677 2675 if repo is None:
2678 2676 raise error.RepoError(_('there is no Mercurial repository here '
2679 2677 '(.hg not found)'))
2680 2678 revs = scmutil.revrange(repo, opts[r'rev'])
2681 2679
2682 2680 props = {}
2683 2681 for d in opts[r'define']:
2684 2682 try:
2685 2683 k, v = (e.strip() for e in d.split('=', 1))
2686 2684 if not k or k == 'ui':
2687 2685 raise ValueError
2688 2686 props[k] = v
2689 2687 except ValueError:
2690 2688 raise error.Abort(_('malformed keyword definition: %s') % d)
2691 2689
2692 2690 if ui.verbose:
2693 2691 aliases = ui.configitems('templatealias')
2694 2692 tree = templater.parse(tmpl)
2695 2693 ui.note(templater.prettyformat(tree), '\n')
2696 2694 newtree = templater.expandaliases(tree, aliases)
2697 2695 if newtree != tree:
2698 2696 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2699 2697
2700 2698 if revs is None:
2701 2699 tres = formatter.templateresources(ui, repo)
2702 2700 t = formatter.maketemplater(ui, tmpl, resources=tres)
2703 2701 if ui.verbose:
2704 2702 kwds, funcs = t.symbolsuseddefault()
2705 2703 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2706 2704 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2707 2705 ui.write(t.renderdefault(props))
2708 2706 else:
2709 2707 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2710 2708 if ui.verbose:
2711 2709 kwds, funcs = displayer.t.symbolsuseddefault()
2712 2710 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2713 2711 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2714 2712 for r in revs:
2715 2713 displayer.show(repo[r], **pycompat.strkwargs(props))
2716 2714 displayer.close()
2717 2715
2718 2716 @command('debuguigetpass', [
2719 2717 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2720 2718 ], _('[-p TEXT]'), norepo=True)
2721 2719 def debuguigetpass(ui, prompt=''):
2722 2720 """show prompt to type password"""
2723 2721 r = ui.getpass(prompt)
2724 2722     ui.write(('response: %s\n') % r)
2725 2723
2726 2724 @command('debuguiprompt', [
2727 2725 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2728 2726 ], _('[-p TEXT]'), norepo=True)
2729 2727 def debuguiprompt(ui, prompt=''):
2730 2728 """show plain prompt"""
2731 2729 r = ui.prompt(prompt)
2732 2730 ui.write(('response: %s\n') % r)
2733 2731
2734 2732 @command('debugupdatecaches', [])
2735 2733 def debugupdatecaches(ui, repo, *pats, **opts):
2736 2734 """warm all known caches in the repository"""
2737 2735 with repo.wlock(), repo.lock():
2738 2736 repo.updatecaches(full=True)
2739 2737
2740 2738 @command('debugupgraderepo', [
2741 2739 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2742 2740 ('', 'run', False, _('performs an upgrade')),
2743 2741 ])
2744 2742 def debugupgraderepo(ui, repo, run=False, optimize=None):
2745 2743 """upgrade a repository to use different features
2746 2744
2747 2745 If no arguments are specified, the repository is evaluated for upgrade
2748 2746 and a list of problems and potential optimizations is printed.
2749 2747
2750 2748 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2751 2749 can be influenced via additional arguments. More details will be provided
2752 2750 by the command output when run without ``--run``.
2753 2751
2754 2752 During the upgrade, the repository will be locked and no writes will be
2755 2753 allowed.
2756 2754
2757 2755 At the end of the upgrade, the repository may not be readable while new
2758 2756 repository data is swapped in. This window will be as long as it takes to
2759 2757 rename some directories inside the ``.hg`` directory. On most machines, this
2760 2758 should complete almost instantaneously and the chances of a consumer being
2761 2759 unable to access the repository should be low.
2762 2760 """
2763 2761 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2764 2762
2765 2763 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2766 2764 inferrepo=True)
2767 2765 def debugwalk(ui, repo, *pats, **opts):
2768 2766 """show how files match on given patterns"""
2769 2767 opts = pycompat.byteskwargs(opts)
2770 2768 m = scmutil.match(repo[None], pats, opts)
2771 2769 if ui.verbose:
2772 2770 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2773 2771 items = list(repo[None].walk(m))
2774 2772 if not items:
2775 2773 return
2776 2774 f = lambda fn: fn
2777 2775 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2778 2776 f = lambda fn: util.normpath(fn)
2779 2777 fmt = 'f %%-%ds %%-%ds %%s' % (
2780 2778 max([len(abs) for abs in items]),
2781 2779 max([len(m.rel(abs)) for abs in items]))
2782 2780 for abs in items:
2783 2781 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2784 2782 ui.write("%s\n" % line.rstrip())
2785 2783
2786 2784 @command('debugwhyunstable', [], _('REV'))
2787 2785 def debugwhyunstable(ui, repo, rev):
2788 2786 """explain instabilities of a changeset"""
2789 2787 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2790 2788 dnodes = ''
2791 2789 if entry.get('divergentnodes'):
2792 2790 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2793 2791 for ctx in entry['divergentnodes']) + ' '
2794 2792 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2795 2793 entry['reason'], entry['node']))
2796 2794
2797 2795 @command('debugwireargs',
2798 2796 [('', 'three', '', 'three'),
2799 2797 ('', 'four', '', 'four'),
2800 2798 ('', 'five', '', 'five'),
2801 2799 ] + cmdutil.remoteopts,
2802 2800 _('REPO [OPTIONS]... [ONE [TWO]]'),
2803 2801 norepo=True)
2804 2802 def debugwireargs(ui, repopath, *vals, **opts):
2805 2803 opts = pycompat.byteskwargs(opts)
2806 2804 repo = hg.peer(ui, opts, repopath)
2807 2805 for opt in cmdutil.remoteopts:
2808 2806 del opts[opt[1]]
2809 2807 args = {}
2810 2808 for k, v in opts.iteritems():
2811 2809 if v:
2812 2810 args[k] = v
2813 2811 args = pycompat.strkwargs(args)
2814 2812 # run twice to check that we don't mess up the stream for the next command
2815 2813 res1 = repo.debugwireargs(*vals, **args)
2816 2814 res2 = repo.debugwireargs(*vals, **args)
2817 2815 ui.write("%s\n" % res1)
2818 2816 if res1 != res2:
2819 2817 ui.warn("%s\n" % res2)
2820 2818
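# Illustrative input for _parsewirelangblocks() below: two blocks, the first
# carrying one indented argument line. For example (a sketch; the argument
# values are hypothetical):
#
#   import io
#   list(_parsewirelangblocks(io.BytesIO(b'command listkeys\n'
#                                         b'    namespace bookmarks\n'
#                                         b'readavailable\n')))
#
# would yield (b'command listkeys', [b'    namespace bookmarks']) followed by
# (b'readavailable', []); indented lines keep their leading whitespace.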
2821 2819 def _parsewirelangblocks(fh):
2822 2820 activeaction = None
2823 2821 blocklines = []
2824 2822
2825 2823 for line in fh:
2826 2824 line = line.rstrip()
2827 2825 if not line:
2828 2826 continue
2829 2827
2830 2828 if line.startswith(b'#'):
2831 2829 continue
2832 2830
2833 2831 if not line.startswith(b' '):
2834 2832 # New block. Flush previous one.
2835 2833 if activeaction:
2836 2834 yield activeaction, blocklines
2837 2835
2838 2836 activeaction = line
2839 2837 blocklines = []
2840 2838 continue
2841 2839
2842 2840 # Else we start with an indent.
2843 2841
2844 2842 if not activeaction:
2845 2843 raise error.Abort(_('indented line outside of block'))
2846 2844
2847 2845 blocklines.append(line)
2848 2846
2849 2847 # Flush last block.
2850 2848 if activeaction:
2851 2849 yield activeaction, blocklines
2852 2850
2853 2851 @command('debugwireproto',
2854 2852 [
2855 2853 ('', 'localssh', False, _('start an SSH server for this repo')),
2856 2854 ('', 'peer', '', _('construct a specific version of the peer')),
2857 2855 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2858 2856 ('', 'nologhandshake', False,
2859 2857 _('do not log I/O related to the peer handshake')),
2860 2858 ] + cmdutil.remoteopts,
2861 2859 _('[PATH]'),
2862 2860 optionalrepo=True)
2863 2861 def debugwireproto(ui, repo, path=None, **opts):
2864 2862 """send wire protocol commands to a server
2865 2863
2866 2864 This command can be used to issue wire protocol commands to remote
2867 2865 peers and to debug the raw data being exchanged.
2868 2866
2869 2867 ``--localssh`` will start an SSH server against the current repository
2870 2868 and connect to that. By default, the connection will perform a handshake
2871 2869 and establish an appropriate peer instance.
2872 2870
2873 2871 ``--peer`` can be used to bypass the handshake protocol and construct a
2874 2872 peer instance using the specified class type. Valid values are ``raw``,
2875 2873 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2876 2874 raw data payloads and don't support higher-level command actions.
2877 2875
2878 2876 ``--noreadstderr`` can be used to disable automatic reading from stderr
2879 2877 of the peer (for SSH connections only). Disabling automatic reading of
2880 2878 stderr is useful for making output more deterministic.
2881 2879
2882 2880 Commands are issued via a mini language which is specified via stdin.
2883 2881 The language consists of individual actions to perform. An action is
2884 2882 defined by a block. A block is defined as a line with no leading
2885 2883 space followed by 0 or more lines with leading space. Blocks are
2886 2884 effectively a high-level command with additional metadata.
2887 2885
2888 2886 Lines beginning with ``#`` are ignored.
2889 2887
2890 2888 The following sections denote available actions.
2891 2889
2892 2890 raw
2893 2891 ---
2894 2892
2895 2893 Send raw data to the server.
2896 2894
2897 2895 The block payload contains the raw data to send as one atomic send
2898 2896 operation. The data may not actually be delivered in a single system
2899 2897 call: it depends on the abilities of the transport being used.
2900 2898
2901 2899 Each line in the block is de-indented and concatenated. Then, that
2902 2900 value is evaluated as a Python b'' literal. This allows the use of
2903 2901 backslash escaping, etc.
2904 2902
2905 2903 raw+
2906 2904 ----
2907 2905
2908 2906 Behaves like ``raw`` except flushes output afterwards.
2909 2907
2910 2908 command <X>
2911 2909 -----------
2912 2910
2913 2911 Send a request to run a named command, whose name follows the ``command``
2914 2912 string.
2915 2913
2916 2914 Arguments to the command are defined as lines in this block. The format of
2917 2915 each line is ``<key> <value>``. e.g.::
2918 2916
2919 2917 command listkeys
2920 2918 namespace bookmarks
2921 2919
2922 2920 If the value begins with ``eval:``, it will be interpreted as a Python
2923 2921 literal expression. Otherwise values are interpreted as Python b'' literals.
2924 2922 This allows sending complex types and encoding special byte sequences via
2925 2923 backslash escaping.
2926 2924
2927 2925 The following arguments have special meaning:
2928 2926
2929 2927 ``PUSHFILE``
2930 2928 When defined, the *push* mechanism of the peer will be used instead
2931 2929 of the static request-response mechanism and the content of the
2932 2930 file specified in the value of this argument will be sent as the
2933 2931 command payload.
2934 2932
2935 2933 This can be used to submit a local bundle file to the remote.
2936 2934
2937 2935 batchbegin
2938 2936 ----------
2939 2937
2940 2938 Instruct the peer to begin a batched send.
2941 2939
2942 2940 All ``command`` blocks are queued for execution until the next
2943 2941 ``batchsubmit`` block.
2944 2942
2945 2943 batchsubmit
2946 2944 -----------
2947 2945
2948 2946 Submit previously queued ``command`` blocks as a batch request.
2949 2947
2950 2948 This action MUST be paired with a ``batchbegin`` action.
2951 2949
2952 2950 httprequest <method> <path>
2953 2951 ---------------------------
2954 2952
2955 2953 (HTTP peer only)
2956 2954
2957 2955 Send an HTTP request to the peer.
2958 2956
2959 2957 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2960 2958
2961 2959 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2962 2960 headers to add to the request. e.g. ``Accept: foo``.
2963 2961
2964 2962 The following arguments are special:
2965 2963
2966 2964 ``BODYFILE``
2967 2965 The content of the file defined as the value to this argument will be
2968 2966 transferred verbatim as the HTTP request body.
2969 2967
2970 2968 ``frame <type> <flags> <payload>``
2971 2969 Send a unified protocol frame as part of the request body.
2972 2970
2973 2971 All frames will be collected and sent as the body to the HTTP
2974 2972 request.
2975 2973
2976 2974 close
2977 2975 -----
2978 2976
2979 2977 Close the connection to the server.
2980 2978
2981 2979 flush
2982 2980 -----
2983 2981
2984 2982 Flush data written to the server.
2985 2983
2986 2984 readavailable
2987 2985 -------------
2988 2986
2989 2987 Close the write end of the connection and read all available data from
2990 2988 the server.
2991 2989
2992 2990 If the connection to the server encompasses multiple pipes, we poll both
2993 2991 pipes and read available data.
2994 2992
2995 2993 readline
2996 2994 --------
2997 2995
2998 2996 Read a line of output from the server. If there are multiple output
2999 2997 pipes, reads only the main pipe.
3000 2998
3001 2999 ereadline
3002 3000 ---------
3003 3001
3004 3002 Like ``readline``, but read from the stderr pipe, if available.
3005 3003
3006 3004 read <X>
3007 3005 --------
3008 3006
3009 3007 ``read()`` N bytes from the server's main output pipe.
3010 3008
3011 3009 eread <X>
3012 3010 ---------
3013 3011
3014 3012 ``read()`` N bytes from the server's stderr pipe, if available.
3015 3013
3016 3014 Specifying Unified Frame-Based Protocol Frames
3017 3015 ----------------------------------------------
3018 3016
3019 3017 It is possible to emit *Unified Frame-Based Protocol* frames by using special
3020 3018 syntax.
3021 3019
3022 3020 A frame is composed of a type, flags, and a payload. These can be parsed
3023 3021 from a string of the form:
3024 3022
3025 3023 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3026 3024
3027 3025 ``request-id`` and ``stream-id`` are integers defining the request and
3028 3026 stream identifiers.
3029 3027
3030 3028 ``type`` can be an integer value for the frame type or the string name
3031 3029 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3032 3030 ``command-name``.
3033 3031
3034 3032 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3035 3033 components. Each component (and there can be just one) can be an integer
3036 3034 or a flag name for stream flags or frame flags, respectively. Values are
3037 3035 resolved to integers and then bitwise OR'd together.
3038 3036
3039 3037 ``payload`` represents the raw frame payload. If it begins with
3040 3038 ``cbor:``, the following string is evaluated as Python code and the
3041 3039 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3042 3040 as a Python byte string literal.
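
For example, a single frame requesting a hypothetical version 2 ``heads``
command might be specified as::

    1 1 stream-begin command-request new cbor:{b'name': b'heads'}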
3043 3041 """
3044 3042 opts = pycompat.byteskwargs(opts)
3045 3043
3046 3044 if opts['localssh'] and not repo:
3047 3045 raise error.Abort(_('--localssh requires a repository'))
3048 3046
3049 3047 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3050 3048 raise error.Abort(_('invalid value for --peer'),
3051 3049 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3052 3050
3053 3051 if path and opts['localssh']:
3054 3052 raise error.Abort(_('cannot specify --localssh with an explicit '
3055 3053 'path'))
3056 3054
3057 3055 if ui.interactive():
3058 3056 ui.write(_('(waiting for commands on stdin)\n'))
3059 3057
3060 3058 blocks = list(_parsewirelangblocks(ui.fin))
3061 3059
3062 3060 proc = None
3063 3061 stdin = None
3064 3062 stdout = None
3065 3063 stderr = None
3066 3064 opener = None
3067 3065
3068 3066 if opts['localssh']:
3069 3067 # We start the SSH server in its own process so there is process
3070 3068 # separation. This prevents a whole class of potential bugs around
3071 3069 # shared state from interfering with server operation.
3072 3070 args = procutil.hgcmd() + [
3073 3071 '-R', repo.root,
3074 3072 'debugserve', '--sshstdio',
3075 3073 ]
3076 3074 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3077 3075 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3078 3076 bufsize=0)
3079 3077
3080 3078 stdin = proc.stdin
3081 3079 stdout = proc.stdout
3082 3080 stderr = proc.stderr
3083 3081
3084 3082 # We turn the pipes into observers so we can log I/O.
3085 3083 if ui.verbose or opts['peer'] == 'raw':
3086 3084 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3087 3085 logdata=True)
3088 3086 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3089 3087 logdata=True)
3090 3088 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3091 3089 logdata=True)
3092 3090
3093 3091 # --localssh also implies the peer connection settings.
3094 3092
3095 3093 url = 'ssh://localserver'
3096 3094 autoreadstderr = not opts['noreadstderr']
3097 3095
3098 3096 if opts['peer'] == 'ssh1':
3099 3097 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3100 3098 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3101 3099 None, autoreadstderr=autoreadstderr)
3102 3100 elif opts['peer'] == 'ssh2':
3103 3101 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3104 3102 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3105 3103 None, autoreadstderr=autoreadstderr)
3106 3104 elif opts['peer'] == 'raw':
3107 3105 ui.write(_('using raw connection to peer\n'))
3108 3106 peer = None
3109 3107 else:
3110 3108 ui.write(_('creating ssh peer from handshake results\n'))
3111 3109 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3112 3110 autoreadstderr=autoreadstderr)
3113 3111
3114 3112 elif path:
3115 3113 # We bypass hg.peer() so we can proxy the sockets.
3116 3114 # TODO consider not doing this because we skip
3117 3115 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3118 3116 u = util.url(path)
3119 3117 if u.scheme != 'http':
3120 3118 raise error.Abort(_('only http:// paths are currently supported'))
3121 3119
3122 3120 url, authinfo = u.authinfo()
3123 3121 openerargs = {
3124 3122 r'useragent': b'Mercurial debugwireproto',
3125 3123 }
3126 3124
3127 3125 # Turn pipes/sockets into observers so we can log I/O.
3128 3126 if ui.verbose:
3129 3127 openerargs.update({
3130 3128 r'loggingfh': ui,
3131 3129 r'loggingname': b's',
3132 3130 r'loggingopts': {
3133 3131 r'logdata': True,
3134 3132 r'logdataapis': False,
3135 3133 },
3136 3134 })
3137 3135
3138 3136 if ui.debugflag:
3139 3137 openerargs[r'loggingopts'][r'logdataapis'] = True
3140 3138
3141 3139 # Don't send default headers when in raw mode. This allows us to
3142 3140 # bypass most of the behavior of our URL handling code so we can
3143 3141 # have near complete control over what's sent on the wire.
3144 3142 if opts['peer'] == 'raw':
3145 3143 openerargs[r'sendaccept'] = False
3146 3144
3147 3145 opener = urlmod.opener(ui, authinfo, **openerargs)
3148 3146
3149 3147 if opts['peer'] == 'http2':
3150 3148 ui.write(_('creating http peer for wire protocol version 2\n'))
3151 3149 # We go through makepeer() because we need an API descriptor for
3152 3150 # the peer instance to be useful.
3153 3151 with ui.configoverride({
3154 3152 ('experimental', 'httppeer.advertise-v2'): True}):
3155 3153 if opts['nologhandshake']:
3156 3154 ui.pushbuffer()
3157 3155
3158 3156 peer = httppeer.makepeer(ui, path, opener=opener)
3159 3157
3160 3158 if opts['nologhandshake']:
3161 3159 ui.popbuffer()
3162 3160
3163 3161 if not isinstance(peer, httppeer.httpv2peer):
3164 3162 raise error.Abort(_('could not instantiate HTTP peer for '
3165 3163 'wire protocol version 2'),
3166 3164 hint=_('the server may not have the feature '
3167 3165 'enabled or is not allowing this '
3168 3166 'client version'))
3169 3167
3170 3168 elif opts['peer'] == 'raw':
3171 3169 ui.write(_('using raw connection to peer\n'))
3172 3170 peer = None
3173 3171 elif opts['peer']:
3174 3172 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3175 3173 opts['peer'])
3176 3174 else:
3177 3175 peer = httppeer.makepeer(ui, path, opener=opener)
3178 3176
3179 3177 # We /could/ populate stdin/stdout with sock.makefile()...
3180 3178 else:
3181 3179 raise error.Abort(_('unsupported connection configuration'))
3182 3180
3183 3181 batchedcommands = None
3184 3182
3185 3183 # Now perform actions based on the parsed wire language instructions.
3186 3184 for action, lines in blocks:
3187 3185 if action in ('raw', 'raw+'):
3188 3186 if not stdin:
3189 3187 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3190 3188
3191 3189 # Concatenate the data together.
3192 3190 data = ''.join(l.lstrip() for l in lines)
3193 3191 data = stringutil.unescapestr(data)
3194 3192 stdin.write(data)
3195 3193
3196 3194 if action == 'raw+':
3197 3195 stdin.flush()
3198 3196 elif action == 'flush':
3199 3197 if not stdin:
3200 3198 raise error.Abort(_('cannot call flush on this peer'))
3201 3199 stdin.flush()
3202 3200 elif action.startswith('command'):
3203 3201 if not peer:
3204 3202 raise error.Abort(_('cannot send commands unless peer instance '
3205 3203 'is available'))
3206 3204
3207 3205 command = action.split(' ', 1)[1]
3208 3206
3209 3207 args = {}
3210 3208 for line in lines:
3211 3209 # We need to allow empty values.
3212 3210 fields = line.lstrip().split(' ', 1)
3213 3211 if len(fields) == 1:
3214 3212 key = fields[0]
3215 3213 value = ''
3216 3214 else:
3217 3215 key, value = fields
3218 3216
3219 3217 if value.startswith('eval:'):
3220 3218 value = stringutil.evalpythonliteral(value[5:])
3221 3219 else:
3222 3220 value = stringutil.unescapestr(value)
3223 3221
3224 3222 args[key] = value
3225 3223
3226 3224 if batchedcommands is not None:
3227 3225 batchedcommands.append((command, args))
3228 3226 continue
3229 3227
3230 3228 ui.status(_('sending %s command\n') % command)
3231 3229
3232 3230 if 'PUSHFILE' in args:
3233 3231 with open(args['PUSHFILE'], r'rb') as fh:
3234 3232 del args['PUSHFILE']
3235 3233 res, output = peer._callpush(command, fh,
3236 3234 **pycompat.strkwargs(args))
3237 3235 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3238 3236 ui.status(_('remote output: %s\n') %
3239 3237 stringutil.escapestr(output))
3240 3238 else:
3241 3239 with peer.commandexecutor() as e:
3242 3240 res = e.callcommand(command, args).result()
3243 3241
3244 3242 if isinstance(res, wireprotov2peer.commandresponse):
3245 3243 val = list(res.cborobjects())
3246 3244 ui.status(_('response: %s\n') %
3247 3245 stringutil.pprint(val, bprefix=True, indent=2))
3248 3246 else:
3249 3247 ui.status(_('response: %s\n') %
3250 3248 stringutil.pprint(res, bprefix=True, indent=2))
3251 3249
3252 3250 elif action == 'batchbegin':
3253 3251 if batchedcommands is not None:
3254 3252 raise error.Abort(_('nested batchbegin not allowed'))
3255 3253
3256 3254 batchedcommands = []
3257 3255 elif action == 'batchsubmit':
3258 3256 # There is a batching API we could go through. But it would be
3259 3257 # difficult to normalize requests into function calls. It is easier
3260 3258 # to bypass this layer and normalize to commands + args.
3261 3259 ui.status(_('sending batch with %d sub-commands\n') %
3262 3260 len(batchedcommands))
3263 3261 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3264 3262 ui.status(_('response #%d: %s\n') %
3265 3263 (i, stringutil.escapestr(chunk)))
3266 3264
3267 3265 batchedcommands = None
3268 3266
3269 3267 elif action.startswith('httprequest '):
3270 3268 if not opener:
3271 3269 raise error.Abort(_('cannot use httprequest without an HTTP '
3272 3270 'peer'))
3273 3271
3274 3272 request = action.split(' ', 2)
3275 3273 if len(request) != 3:
3276 3274 raise error.Abort(_('invalid httprequest: expected format is '
3277 3275 '"httprequest <method> <path>'))
3278 3276
3279 3277 method, httppath = request[1:]
3280 3278 headers = {}
3281 3279 body = None
3282 3280 frames = []
3283 3281 for line in lines:
3284 3282 line = line.lstrip()
3285 3283 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3286 3284 if m:
3287 3285 headers[m.group(1)] = m.group(2)
3288 3286 continue
3289 3287
3290 3288 if line.startswith(b'BODYFILE '):
3291 3289 with open(line.split(b' ', 1)[1], 'rb') as fh:
3292 3290 body = fh.read()
3293 3291 elif line.startswith(b'frame '):
3294 3292 frame = wireprotoframing.makeframefromhumanstring(
3295 3293 line[len(b'frame '):])
3296 3294
3297 3295 frames.append(frame)
3298 3296 else:
3299 3297 raise error.Abort(_('unknown argument to httprequest: %s') %
3300 3298 line)
3301 3299
3302 3300 url = path + httppath
3303 3301
3304 3302 if frames:
3305 3303 body = b''.join(bytes(f) for f in frames)
3306 3304
3307 3305 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3308 3306
3309 3307 # urllib.Request insists on using has_data() as a proxy for
3310 3308 # determining the request method. Override that to use our
3311 3309 # explicitly requested method.
3312 3310 req.get_method = lambda: pycompat.sysstr(method)
3313 3311
3314 3312 try:
3315 3313 res = opener.open(req)
3316 3314 body = res.read()
3317 3315 except util.urlerr.urlerror as e:
3318 3316 # read() method must be called, but only exists in Python 2
3319 3317 getattr(e, 'read', lambda: None)()
3320 3318 continue
3321 3319
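            # cborutil.decodeall() returns a list of every top-level CBOR
            # value in the buffer; the body is expected to contain a single
            # value, so only the first decoded object is pretty-printed.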
3322 3320 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3323 3321 ui.write(_('cbor> %s\n') %
3324 stringutil.pprint(cbor.loads(body), bprefix=True,
3322 stringutil.pprint(cborutil.decodeall(body)[0],
3323 bprefix=True,
3325 3324 indent=2))
3326 3325
3327 3326 elif action == 'close':
3328 3327 peer.close()
3329 3328 elif action == 'readavailable':
3330 3329 if not stdout or not stderr:
3331 3330 raise error.Abort(_('readavailable not available on this peer'))
3332 3331
3333 3332 stdin.close()
3334 3333 stdout.read()
3335 3334 stderr.read()
3336 3335
3337 3336 elif action == 'readline':
3338 3337 if not stdout:
3339 3338 raise error.Abort(_('readline not available on this peer'))
3340 3339 stdout.readline()
3341 3340 elif action == 'ereadline':
3342 3341 if not stderr:
3343 3342 raise error.Abort(_('ereadline not available on this peer'))
3344 3343 stderr.readline()
3345 3344 elif action.startswith('read '):
3346 3345 count = int(action.split(' ', 1)[1])
3347 3346 if not stdout:
3348 3347 raise error.Abort(_('read not available on this peer'))
3349 3348 stdout.read(count)
3350 3349 elif action.startswith('eread '):
3351 3350 count = int(action.split(' ', 1)[1])
3352 3351 if not stderr:
3353 3352 raise error.Abort(_('eread not available on this peer'))
3354 3353 stderr.read(count)
3355 3354 else:
3356 3355 raise error.Abort(_('unknown action: %s') % action)
3357 3356
3358 3357 if batchedcommands is not None:
3359 3358 raise error.Abort(_('unclosed "batchbegin" request'))
3360 3359
3361 3360 if peer:
3362 3361 peer.close()
3363 3362
3364 3363 if proc:
3365 3364 proc.kill()