##// END OF EJS Templates
py3: mask out None type when printing in `debuglocks`...
Matt Harbison -
r39920:9c8eff5c default
parent child Browse files
Show More
@@ -1,3368 +1,3368 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 dagparser,
42 42 encoding,
43 43 error,
44 44 exchange,
45 45 extensions,
46 46 filemerge,
47 47 filesetlang,
48 48 formatter,
49 49 hg,
50 50 httppeer,
51 51 localrepo,
52 52 lock as lockmod,
53 53 logcmdutil,
54 54 merge as mergemod,
55 55 obsolete,
56 56 obsutil,
57 57 phases,
58 58 policy,
59 59 pvec,
60 60 pycompat,
61 61 registrar,
62 62 repair,
63 63 revlog,
64 64 revset,
65 65 revsetlang,
66 66 scmutil,
67 67 setdiscovery,
68 68 simplemerge,
69 69 sshpeer,
70 70 sslutil,
71 71 streamclone,
72 72 templater,
73 73 treediscovery,
74 74 upgrade,
75 75 url as urlmod,
76 76 util,
77 77 vfs as vfsmod,
78 78 wireprotoframing,
79 79 wireprotoserver,
80 80 wireprotov2peer,
81 81 )
82 82 from .utils import (
83 83 cborutil,
84 84 dateutil,
85 85 procutil,
86 86 stringutil,
87 87 )
88 88
89 89 from .revlogutils import (
90 90 deltas as deltautil
91 91 )
92 92
93 93 release = lockmod.release
94 94
95 95 command = registrar.command()
96 96
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three arguments: the first names a stand-alone revlog index file.
    # Two arguments: resolve against the current repository's changelog.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    ancnode = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(ancnode), hex(ancnode)))
115 115
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the path (file or URL), sniff the bundle type, then replay
    # its contents into the current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
122 122
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Building from scratch only: refuse to run on a non-empty repo.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    # (first of two parses: this one only counts nodes so progress
    # reporting below has a total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # All commits happen under one wlock/lock/transaction so a failure
    # midway leaves no partial state behind.
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                # rev id of the most recently committed node
        atbranch = 'default'   # branch applied to subsequently created nodes
        nodeids = []           # maps DAG rev id -> committed node id
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' event: create one commit; ps are parent backrefs.
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain a single file "mf" whose lines merge cleanly:
                    # merges three-way-merge the parents' copies.
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's dedicated line so every rev changes it.
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale by every rev.
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # One brand-new file per rev; merges also carry over
                    # the second parent's nf* files unchanged.
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # Resolve parent backrefs to previously committed node ids;
                # negative/absent backrefs mean the null parent.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' event: record a local tag for an already-built node.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a' event: switch the branch for subsequent nodes.
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Local tags are written outside the transaction; localtags is not
    # a transacted file.
    if tags:
        repo.vfs.write("localtags", "".join(tags))
270 270
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Print the contents of changegroup 'gen'. With all=True, dump each
    # delta's metadata section by section; otherwise only print the
    # changelog node hashes. 'indent' prefixes every line (used when
    # nested inside bundle2 part output).
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print one header line then every delta in the current section.
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # NOTE: the stream must be consumed strictly in wire order:
        # changelog, manifest, then one section per filelog (the filelog
        # iteration terminates on the empty-dict sentinel header).
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        # Terse mode only makes sense for a bare changegroup stream.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
299 299
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown encoding version: report it and stop.
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
        return
    ui.write("%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter('debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
322 322
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary blob 'data'"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phasename))
331 331
def _quasirepr(thing):
    """Return a repr-like bytestring; mappings render with sorted keys
    so the output is deterministic regardless of dict ordering."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
337 337
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # Optionally restrict output to the requested part types.
        if wanted and part.type not in wanted:
            continue
        ui.write(('%s -- %s (mandatory: %r)\n'
                  % (part.type, _quasirepr(part.params), part.mandatory)))
        verbose = not ui.quiet
        if part.type == 'changegroup':
            # Always build the unbundler so the part is consumed,
            # even when quiet suppresses the detailed dump.
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if verbose:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if verbose:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if verbose:
                _debugphaseheads(ui, part, indent=4)
360 360
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
        ('', 'part-type', [], _('show only the named part type')),
        ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only report the bundlespec; skip unpacking the contents.
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        bundle = exchange.readbundle(ui, f, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
379 379
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.write(('Main capabilities:\n'))
    for c in sorted(peer.capabilities()):
        ui.write(('  %s\n') % c)
    # Bundle2 capabilities are advertised separately, nested per key.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)
398 398
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Cross-check every dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # And the reverse: every manifest1 file must be tracked by the dirstate.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Fix: the original rebound the name 'error' to the message string,
        # shadowing the imported 'error' module, so 'error.Abort' raised
        # AttributeError on bytes instead of the intended Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
426 426
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
437 437
def _debugdisplaycolor(ui):
    """print every available color name, each rendered in its own color"""
    # Work on a copy so we can replace the style table without touching
    # the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        name, label = item
        return ('_' in name, name, label)
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
455 455
def _debugdisplaystyle(ui):
    """print each configured style with the effects it expands to"""
    ui.write(_('available style:\n'))
    styles = ui._styles
    if not styles:
        return
    # Pad effect lists so they line up in one column.
    width = max(len(name) for name in styles)
    for label in sorted(styles):
        effects = styles[label]
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write('\n')
469 469
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
487 487
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Stand-alone revlog index file: emit its DAG, labeling any
        # explicitly listed revisions as "rN".
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, parents)) per node and ('l', (rev, label))
            # for requested labels, in the order dagtextlines expects.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No index file given: walk the repository changelog instead,
        # optionally labeling with tags and annotating branch changes.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Invert the tag map: changelog rev -> list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # 'a' events announce a branch switch to the formatter.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
550 550
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    store = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(store.revision(store.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
566 566
@command('debugdate',
        [('e', 'extended', None, _('try extended date formats'))],
        _('[-e] DATE [RANGE]'),
        norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e additionally tries the extended (fuzzier) date formats.
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
582 582
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify rev's delta and total up its chain's compressed size.
        # Index entry fields used: e[1] compressed size, e[2] uncompressed
        # size, e[3] delta base rev, e[5]/e[6] parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta a delta can only be against the
            # previous revision (or the rev itself for a full snapshot).
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Chains are numbered by first appearance of their base revision.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # Simulate a sparse read of the chain: how many disk hunks,
            # how much total data, and how dense the useful bytes are.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
734 734
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --no-dates.
    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        # sort by mtime, then by filename
        keyfunc = lambda item: (item[1][3], item[0])
    else:
        keyfunc = None  # sort by filename
    for path, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # Dirstate entry tuple: (state, mode, size, mtime).
        mtime = ent[3]
        if mtime == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            modestr = 'lnk'
        else:
            modestr = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], modestr, ent[2], timestr,
                                        path))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        # Run one discovery round and report the common-head relationship
        # between the local repo and the remote.
        if opts.get('old'):
            # Legacy tree-walking discovery protocol.
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # Reduce the common set to its heads for the summary below.
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            # Modern set-based discovery; --rev limits the ancestor set.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE: 'any' here shadows the builtin; kept as-is.
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
822 822
# read/write buffer size (4 KiB) used by `hg debugdownload`
_chunksize = 4 << 10
824 824
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    src = urlmod.open(ui, url, output)

    # Without --output, stream straight to the ui; otherwise buffer
    # writes to the named file.
    dest = open(output, "wb", _chunksize) if output else ui
    try:
        data = src.read(_chunksize)
        while data:
            dest.write(data)
            data = src.read(_chunksize)
    finally:
        # Only close what we opened ourselves; the ui is not ours to close.
        if output:
            dest.close()
846 846
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    allexts = sorted(extensions.extensions(ui), key=operator.itemgetter(0))
    for extname, extmod in allexts:
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            # never expose magic string to users
            exttestedwith = []
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # In default mode annotate the name with its tested-with status.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                fm.plain(' (%s!)\n' % exttestedwith[-1])

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                 _('  tested with: %s\n'),
                 fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                 _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
892 892
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # local import to break an import cycle at module load time
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # parsing pipeline: each stage transforms the tree produced by the
    # previous one
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    # names of the stages whose intermediate tree should be dumped
    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # only label the dump when multiple stages may be requested
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        # include working-directory files (even unknown/ignored ones)
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    # finally, print the files selected by the fileset expression
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
959 959
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)

    # name column width: widest variant name, never narrower than the header
    namewidth = max(len(fv.name) for fv in upgrade.allformatvariant)
    namewidth = max(len('format-variant'), namewidth)

    def makeformatname(name):
        # '%s:' padded so that all value columns line up
        return '%s:' + (' ' * (namewidth - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through; everything else renders as yes/no
            if util.safehasattr(value, 'startswith'):
                return value
            return 'yes' if value else 'no'
    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (namewidth - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels reflecting whether repo, config and default agree
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        configlabel = ('formatvariant.config.special'
                       if fv.default != configvalue
                       else 'formatvariant.config.default')
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1021 1021
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a boolean capability check as 'yes'/'no'
        return flag and 'yes' or 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probing case sensitivity requires creating a temporary file, which can
    # fail (e.g. read-only path); report '(unknown)' in that case
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # assemble keyword arguments for the wire-protocol getbundle call
    # TODO: get desired bundlecaps from command line.
    kwargs = {r'bundlecaps': None}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    bundle = peer.getbundle('debug', **kwargs)

    # map the user-visible compression name to the on-disk bundle type
    typemap = {'none': 'HG10UN',
               'bzip2': 'HG10BZ',
               'gzip': 'HG10GZ',
               'bundle2': 'HG20'}
    bundletype = typemap.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined ignore matcher itself
        ui.write("%s\n" % pycompat.byterepr(ignore))
        return
    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # the file itself is not ignored; check whether one of its
                # parent directories is
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1115 1115
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    # --debug shows full-length hashes, otherwise abbreviated ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the node-id column width from the first entry
    # (falls back to 12 for an empty store)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    # one row per revision: rev, linkrev, nodeid, and both parents
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1151 1151
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # one "parent -> child" edge per parent of each revision
    for rev in store:
        parents = store.parents(store.node(rev))
        ui.write("\t%d -> %d\n" % (store.rev(parents[0]), rev))
        # a second edge only exists for merge revisions
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write("}\n")
1166 1166
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Checks the local installation piece by piece (encoding, Python,
    compiled modules, compression engines, templates, editor, username)
    and reports each result. Warnings do not count as problems.

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # write `contents` to a fresh temp file and return its path
    # NOTE(review): this helper appears unused within this function --
    # confirm before removing
    def writetemp(contents):
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # number of hard failures detected so far (also the return value)
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        # verify the configured encoding is known to Python's codec registry
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    # version string may carry a "+extra" build suffix; report both halves
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions are expected to be importable under these policies
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is cleared to flag the install as broken below
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; only the binary is checked
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the fallback default, so a missing 'vi' is only reported as
    # "no editor set", not counted as a problem
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1340 1340
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one digit per queried id, in query order
    ui.write("%s\n" % "".join("1" if f else "0" for f in flags))
1354 1354
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; delegate to the modern implementation
    return debugnamecomplete(ui, repo, *args)
1359 1359
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: blindly delete the lock files and exit
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire the lock(s), hold until the user (or a
    # signal) releases us, then drop them in the finally clause
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report on the current holder (if any) of each lock
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so nobody else holds it: free
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    # util.username() may return None (e.g. unknown uid);
                    # mask it so the '%s' formatting stays bytes-safe on py3
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user or b'None', pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1456 1456
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog.getstorage(b'')
        try:
            # not every storage backend exposes a fulltext cache
            cache = r._fulltextcache
        except AttributeError:
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read()  # stores revision in cache too

        # finally, dump the cache contents (most recently used first)
        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1503 1503
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    # render the all-zero hash as the literal 'null'
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    # pretty-print the v1 or v2 record list read below
    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: NUL-separated driver and state
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record; fields are NUL-separated
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    # v1 records lack the "other" node
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename, then key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other and (optionally) base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'order'; the rest by payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1602 1602
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # gather every known name; branches are handled separately below so
    # that only open branches are offered
    candidates = set()
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # keep only names matching one of the given prefixes (all names when
    # no argument was given)
    prefixes = args or ['']
    matched = set()
    for prefix in prefixes:
        matched.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matched)))
    ui.write('\n')
1622 1622
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    # convert a hex string to a binary node id, without requiring it to
    # exist locally; aborts on malformed input
    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete: remove markers by index and return early
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parents can only be recorded for locally-known nodes
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1739 1739
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state letter is in `acceptable`.
        ds = repo.dirstate
        normed = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if normed != repo.root and not normed.startswith(rootdir):
            return [], []
        if os.path.isdir(normed):
            normed += '/'
        normed = normed[len(rootdir):]
        # dirstate paths always use '/', so normalize OS separators
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            normed = normed.replace(pycompat.ossep, '/')
        prefixlen = len(normed)
        wantfull = opts[r'full']
        matches, subdirs = set(), set()
        for f, st in ds.iteritems():
            if not (f.startswith(normed) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if wantfull:
                matches.add(f)
                continue
            # without --full, stop at the next path separator
            sep = f.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                subdirs.add(f[:sep])
            else:
                matches.add(f)
        return matches, subdirs

    # Build the set of acceptable dirstate states from the flags given.
    acceptable = ''
    for optname, states in [(r'normal', 'nm'), (r'added', 'a'),
                            (r'removed', 'r')]:
        if opts[optname]:
            acceptable += states
    cwd = repo.getcwd()

    allfiles = set()
    alldirs = set()
    for pat in (specs or ['.']):
        fs, ds = complete(pat, acceptable or 'nmar')
        allfiles.update(fs)
        alldirs.update(ds)
    allfiles.update(alldirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(allfiles)))
    ui.write('\n')
1804 1804
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; it is only displayed with --debug.
    overrides = {('devel', 'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1823 1823
@command('debugpickmergetool',
    [('r', 'rev', '', _('check for files in this revision'), _('REV')),
     ('', 'changedelete', None, _('emulate merging change and delete')),
     ] + cmdutil.walkopts + cmdutil.mergetoolopts,
    _('[PATTERN]...'),
    inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    forced = opts['tool']
    if forced:
        overrides[('ui', 'forcemerge')] = forced
        ui.note(('with --tool %r\n') % (pycompat.bytestr(forced)))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # buffer matching chatter away unless --debug was given
            quiet = not ui.debugflag
            try:
                if quiet:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                    fctx.isbinary(),
                                                    'l' in fctx.flags(),
                                                    changedelete)
            finally:
                if quiet:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1902 1902
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # two-arg form: dump every key/value pair in the namespace
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
        return

    # five-arg form: conditionally update the key
    key, old, new = keyinfo
    with target.commandexecutor() as e:
        r = e.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(r) + '\n')
    return not r
1930 1930
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display the relation between two pvecs

    Prints both parent vectors, their depths, the delta/hamming
    distance, and the computed relation: '=' (equal), '>' or '<'
    (ancestor ordering), or '|' (unrelated).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # The comparisons above should be exhaustive, but make sure
        # 'rel' is always bound so the write below cannot raise
        # NameError if pvec comparison semantics ever change.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1951 1951
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        changedfiles = None
        # With --minimal, restrict the rebuild to files where the
        # dirstate and the target manifest disagree (see docstring).
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            manifestonly = inmanifest - indirstate
            dsonly = indirstate - inmanifest
            dsnotadded = {f for f in dsonly if ds[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1989 1989
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all of the actual work happens in repair.rebuildfncache.
    repair.rebuildfncache(ui, repo)
1994 1994
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        relpath = matcher.rel(abspath)
        # renamed() yields (source path, source filenode) or a false value
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
2012 2012
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # -d/--dump: emit one raw line per revision instead of the
    # aggregated statistics computed below.
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "stored as a full text", not as a delta
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision's parents stop being heads once it is seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks the ways a "delta" is built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is a [min, max, cumulative]
    # triple maintained by addsize() below
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, cumulative] triple `l`
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full text (a depth-0 snapshot)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta against another revision
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn cumulative sizes into averages in place (note: floor division
    # on Python 2 -- this module has no `division` __future__ import)
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # templates for width-matched numeric output
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # '%d' format sized for the largest expected value
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # 'value (percent)' format sized for the largest expected value
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # pair a value with its percentage of `total` for the formats above
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a one-byte chunk type marker
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    # per-revision size statistics only make sense for revlogv1+
    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            # parent deltas only occur with the generaldelta layout
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2312 2312
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # full hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # Measure the node id width from the first entry; keep a sane
    # default when the revlog is empty.
    idlen = 12
    for rev in r:
        idlen = len(shortfn(r.node(rev)))
        break

    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                     " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for rev in r:
        node = r.node(rev)
        if format == 0:
            try:
                ps = r.parents(node)
            except Exception:
                ps = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    rev, r.start(rev), r.length(rev), r.linkrev(rev),
                    shortfn(node), shortfn(ps[0]), shortfn(ps[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    rev, r.linkrev(rev), shortfn(node), shortfn(ps[0]),
                    shortfn(ps[1])))
        else:
            # format == 1 (the only other value allowed above)
            prs = r.parentrevs(rev)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    rev, r.flags(rev), r.start(rev), r.length(rev),
                    r.rawsize(rev), r.linkrev(rev), prs[0], prs[1],
                    shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    rev, r.flags(rev), r.rawsize(rev), r.linkrev(rev),
                    prs[0], prs[1], shortfn(node)))
2377 2377
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # (name, transform) pairs applied in order; each stage consumes the
    # tree produced by the previous one
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stages to print: always, or only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, remembering every intermediate tree;
    # printedtree suppresses duplicate output for unchanged stages
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized trees and show a
        # unified-diff-style listing when their results differ
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2480 2480
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts['logiofd']:
        fd = int(opts['logiofd'])
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(fd, r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
2517 2517
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # a missing second revision defaults to the null revision
    p1node = scmutil.revsingle(repo, rev1).node()
    p2node = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(p1node, p2node)
2535 2535
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Only https and ssh URLs carry a TLS endpoint we can probe.
    portmap = {'https': 443, 'ssh': 22}
    if url.scheme not in portmap:
        raise error.Abort(_("only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or portmap[url.scheme]))
    except ValueError:
        raise error.Abort(_("malformed port number in URL"))

    from . import win32

    # Certificate verification is deliberately disabled: we want the raw
    # peer certificate so win32 can inspect/build its chain.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        if win32.checkcertificatechain(cert, build=False):
            ui.status(_('full certificate chain is available\n'))
        else:
            ui.status(_('certificate chain is incomplete, updating... '))

            if win32.checkcertificatechain(cert):
                ui.status(_('done.\n'))
            else:
                ui.status(_('failed.\n'))
    finally:
        s.close()
2597 2597
@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state recorded in the given (or working)
    # revision, one path per entry, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2608 2608
@command('debugsuccessorssets',
         [('', 'closest', False, _('return closest successors sets only'))],
         _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so repeated computation over the
    # same obsolescence data is avoided.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(short(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(short(node))
            ui.write('\n')
2661 2661
@command('debugtemplate',
         [('r', 'rev', [], _('apply template on changesets'), _('REV')),
          ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
         _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
         optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions. An empty key and the reserved name
    # 'ui' are both rejected.
    props = {}
    for definition in opts[r'define']:
        try:
            key, value = [e.strip() for e in definition.split('=', 1)]
            if not key or key == 'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s')
                              % definition)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(expanded), '\n')

    def showsymbols(t):
        # List the default keywords and functions the template references.
        kwds, funcs = t.symbolsuseddefault()
        ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
        ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render exactly once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2718 2718
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Echo the answer so tests can verify password prompting. Fix the
    # 'respose' typo so the output matches debuguiprompt's 'response: '.
    ui.write(('response: %s\n') % r)
2726 2726
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the answer so tests can verify plain prompting behavior.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2734 2734
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Acquire the wlock before the store lock, per the usual lock ordering.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2740 2740
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All of the real work lives in the upgrade module; this command is a
    # thin CLI wrapper around it.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2765 2765
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(matcher), '\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Normalize separators for display when ui.slash is set on platforms
    # whose native separator is not '/'.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the columns to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in paths),
        max(len(matcher.rel(p)) for p in paths))
    for p in paths:
        flag = 'exact' if matcher.exact(p) else ''
        line = fmt % (p, display(matcher.rel(p)), flag)
        ui.write("%s\n" % line.rstrip())
2786 2786
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # For divergence, list the offending nodes (with their phases)
        # between the instability name and the reason.
        divergent = entry.get('divergentnodes')
        if divergent:
            dnodes = ' '.join('%s (%s)' % (c.hex(), c.phasestr())
                              for c in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2797 2797
@command('debugwireargs',
         [('', 'three', '', 'three'),
          ('', 'four', '', 'four'),
          ('', 'five', '', 'five'),
         ] + cmdutil.remoteopts,
         _('REPO [OPTIONS]... [ONE [TWO]]'),
         norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the 'debugwireargs' wire protocol command against a peer.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the standard remote options; only the test options remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    args = pycompat.strkwargs({k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2821 2821
2822 2822 def _parsewirelangblocks(fh):
2823 2823 activeaction = None
2824 2824 blocklines = []
2825 2825
2826 2826 for line in fh:
2827 2827 line = line.rstrip()
2828 2828 if not line:
2829 2829 continue
2830 2830
2831 2831 if line.startswith(b'#'):
2832 2832 continue
2833 2833
2834 2834 if not line.startswith(b' '):
2835 2835 # New block. Flush previous one.
2836 2836 if activeaction:
2837 2837 yield activeaction, blocklines
2838 2838
2839 2839 activeaction = line
2840 2840 blocklines = []
2841 2841 continue
2842 2842
2843 2843 # Else we start with an indent.
2844 2844
2845 2845 if not activeaction:
2846 2846 raise error.Abort(_('indented line outside of block'))
2847 2847
2848 2848 blocklines.append(line)
2849 2849
2850 2850 # Flush last block.
2851 2851 if activeaction:
2852 2852 yield activeaction, blocklines
2853 2853
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True, indent=2))
                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True, indent=2))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() yields [b'BODYFILE', <path>]; open the path
                    # element, not the list itself.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body)[0],
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now