##// END OF EJS Templates
debugcommands: use openstorage() in debugdata (BC)...
Gregory Szorc -
r39317:dd6bc250 default
parent child Browse files
Show More
@@ -1,3325 +1,3325 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 encoding,
46 46 error,
47 47 exchange,
48 48 extensions,
49 49 filemerge,
50 50 filesetlang,
51 51 formatter,
52 52 hg,
53 53 httppeer,
54 54 localrepo,
55 55 lock as lockmod,
56 56 logcmdutil,
57 57 merge as mergemod,
58 58 obsolete,
59 59 obsutil,
60 60 phases,
61 61 policy,
62 62 pvec,
63 63 pycompat,
64 64 registrar,
65 65 repair,
66 66 revlog,
67 67 revset,
68 68 revsetlang,
69 69 scmutil,
70 70 setdiscovery,
71 71 simplemerge,
72 72 sshpeer,
73 73 sslutil,
74 74 streamclone,
75 75 templater,
76 76 treediscovery,
77 77 upgrade,
78 78 url as urlmod,
79 79 util,
80 80 vfs as vfsmod,
81 81 wireprotoframing,
82 82 wireprotoserver,
83 83 wireprotov2peer,
84 84 )
85 85 from .utils import (
86 86 dateutil,
87 87 procutil,
88 88 stringutil,
89 89 )
90 90
91 91 release = lockmod.release
92 92
93 93 command = registrar.command()
94 94
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Validate the argument count up front so both branches below can
    # assume a well-formed invocation.
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_('either two or three arguments required'))
    if nargs == 3:
        # explicit index file: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        # two args: resolve against the current repository's changelog
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(anc), hex(anc)))
113 113
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Use a context manager so the bundle file is closed even when
    # readbundle()/apply() raises; the sibling debugbundle command
    # already opens its file this way.  The original leaked 'f'.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
120 120
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # refuse to run on a non-empty repo: node ids in the DAG text are
    # interpreted as sequential revision numbers starting at 0
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, only to give the
    # progress bar a total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    # NOTE(review): 'type' and 'id' shadow builtins; kept as-is for fidelity.
    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # second parse pass: actually create the commits, inside a single
    # transaction and under both locks
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                  # rev number of the most recently created node
        atbranch = 'default'     # branch applied to subsequently created nodes
        nodeids = []             # nodeids[rev] -> binary node, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' event: create one commit with parents 'ps'
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # maintain the shared "mf" file so that merges of it
                    # are line-wise mergeable
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # touch this rev's dedicated line so every rev changes it
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale by every rev
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # a fresh file per rev; merges also carry over the
                    # second parent's "nf*" files
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # resolve backref parents to the node ids created earlier
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' event: record a local tag for rev 'id'
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a' event: switch named branch for subsequent commits
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
268 268
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """display the contents of changegroup 'gen'

    With 'all', every delta of every section (changelog, manifest, filelogs)
    is dumped; otherwise only changelog node ids are printed.  'indent'
    prefixes every output line (used when nested under a bundle2 part).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # print one section header plus one line per delta; note this
            # consumes gen's delta iterator for the current section
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # sections must be consumed in stream order: changelog, manifest,
        # then each filelog until the empty {} sentinel header
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
297 297
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # markers encoded with a version we do not understand: report and bail
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
        return
    ui.write("%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter('debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
320 320
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = ' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in decoded[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phasename))
329 329
def _quasirepr(thing):
    """render 'thing' repr-style, with deterministic (sorted) dict output."""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return '{%s}' % b', '.join(pairs)
335 335
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering
        if wanted and part.type not in wanted:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            # the unbundler is constructed even in quiet mode, matching the
            # original control flow
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
358 358
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec, nothing else
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
377 377
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    b2caps = bundle2.bundle2caps(peer)
    if not b2caps:
        return
    ui.write(('Bundle2 capabilities:\n'))
    for key, values in sorted(b2caps.iteritems()):
        ui.write((' %s\n') % key)
        for value in values:
            ui.write((' %s\n') % value)
396 396
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # pass 1: every tracked file must appear in the right manifest(s)
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # pass 2: every file in manifest1 must be tracked (state n/r/m)
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Bug fix: the local used to be named 'error', shadowing the
        # imported 'error' module so 'error.Abort' raised AttributeError
        # instead of the intended Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
424 424
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; default lists raw colors/effects
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
435 435
def _debugdisplaycolor(ui):
    """print every known color/effect name, each rendered as itself."""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, unused in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return ('_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
453 453
def _debugdisplaystyle(ui):
    """print each configured style label with its rendered effects."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad after each label so effect lists line up in one column
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            ui.write(': ')
            ui.write(padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(', '.join(rendered))
        ui.write('\n')
467 467
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
485 485
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # standalone index file: one 'n' (node) event per rev, plus an 'l'
        # (label) event for each rev listed on the command line
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # changelog of the current repo, optionally labeled with tags ('l'
        # events) and annotated with branch changes ('a' events)
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # changelog entry field 5 holds the extra dict,
                    # including the branch name
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    # render the event stream as dag text (inverse of dagparser.parsedag)
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
548 548
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the single positional argument is the revision,
    # not a file
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    # openstorage() rather than openrevlog() so the command works with any
    # storage backend exposing the file storage interface, not just revlogs
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit stored bytes without flag processing
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
564 564
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended adds the extended format list to the parser
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
580 580
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        """Return (compsize, uncompsize, deltatype, chain, chainsize) for rev.

        Index entry fields used below: e[1] compressed size, e[2] raw size,
        e[3] delta base rev, e[5]/e[6] parent revs.
        """
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify the delta base relative to this rev
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta the base is always the previous rev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # chain base rev -> stable small integer id, in order of first appearance
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # byte span on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against zero-size revisions/chains
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: how many chunks, how many bytes
            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
732 732
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # dirstate map entries are tuples; fields used here: ent[0] state char,
    # ent[1] mode, ent[2] size, ent[3] mtime (-1 meaning unset)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit of the stored mode
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # finish with recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
764 764
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        # run one discovery round and report common/remote/local head sets
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # reduce the common set to its heads, as new-style discovery does
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                # --rev restricts discovery to the ancestors of these revs
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
817 817
# copy buffer size for debugdownload (4 KiB)
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    # without --output, stream the resource straight to the ui
    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the response handle as well as any output file we opened;
        # the original code leaked 'fh'
        fh.close()
        if output:
            dest.close()
841 841
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose: bare name on its own line; details (if any)
            # follow via condwrite below
            fm.write('name', '%s\n', extname)
        else:
            # default mode: annotate the name with compatibility status
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        # always recorded for template output, even when not printed
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
887 887
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
    ('', 'all-files', False,
     _('test files from all revisions and working directory')),
    ('s', 'show-matcher', None,
     _('print internal representation of matcher')),
    ('p', 'show-stage', [],
     _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # the fileset expression passes through these transformation stages;
    # --show-stage prints the tree after any of them
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect candidate files to run the matcher over
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        # include working directory files (walked with everything enabled)
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    # finally, print the candidate files the matcher accepts
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
954 954
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the widest variant name, but never narrower than the
    # 'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so all value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through unchanged; booleans become yes/no in
            # plain-text output
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between the repo, the config and the
        # Mercurial default can be colorized differently
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1016 1016
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # small helpers mapping probe results onto the historical output values
    def yesno(flag):
        return flag and 'yes' or 'no'

    def orunknown(value):
        return value or '(unknown)'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % orunknown(util.getfsmountpoint(path)))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % orunknown(util.getfstype(path)))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # case sensitivity can only be probed by creating a real file; when that
    # is impossible (e.g. unwritable path) report '(unknown)' rather than
    # aborting
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1033 1033
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # build keyword arguments for the wire-protocol getbundle() call
    kwargs = {}
    if common:
        kwargs[r'common'] = [bin(c) for c in common]
    if head:
        kwargs[r'heads'] = [bin(h) for h in head]
    # TODO: get desired bundlecaps from command line.
    kwargs[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **kwargs)

    # map the user-facing --type value onto the on-disk bundle type tag
    spec = opts.get('type', 'bzip2').lower()
    bundletype = {'none': 'HG10UN',
                  'bzip2': 'HG10BZ',
                  'gzip': 'HG10GZ',
                  'bundle2': 'HG20'}.get(spec)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1068 1068
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined matcher itself
        ui.write("%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise check whether any containing directory is ignored
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1110 1110
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file

    Two historical column layouts are supported, selected with --format 0
    (the default) or --format 1.  --debug shows full-length hashes.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the first node to size the hash columns consistently
        idlen = len(shortfn(r.node(i)))
        break

    # print the header matching the selected format/verbosity
    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be forgiving: a corrupt entry still gets a line, with
                # null parents
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                        i, r.start(i), r.length(i), r.linkrev(i),
                        shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                        i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                        r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1175 1175
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in store:
        # one edge per parent, pointing from parent revision to child
        p1, p2 = store.parents(store.node(rev))
        ui.write("\t%d -> %d\n" % (store.rev(p1), rev))
        # a null second parent means this is not a merge; skip that edge
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write("}\n")
1190 1190
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # write contents to a temporary file and return its name; the
        # caller is responsible for deleting it
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # running tally of detected problems; doubles as the return value
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # verify the C extension modules can actually be imported
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username',  _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1364 1364
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    answers = peer.known(nodes)
    # render one digit per queried node: '1' for known, '0' for unknown
    ui.write("%s\n" % "".join(known and "1" or "0" for known in answers))
1378 1378
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; the real implementation is debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1383 1383
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forced removal: just delete the lock files outright
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # hold the requested lock(s) until the user answers the prompt (or the
    # process is interrupted); the finally clause guarantees release
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT: the lock vanished between the probe and the stat,
                # so treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1480 1480
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog.getstorage(b'')
        try:
            cache = r._fulltextcache
        except AttributeError:
            # not every storage backend keeps a fulltext cache
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read()  # stores revision in cache too

        if not len(cache):
            # terminate with a newline so the message does not run into
            # subsequent output (previously it was written without one)
            ui.write(_('Cache empty\n'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24   # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1527 1527
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # rtype below is the single-letter record type tag from the merge
        # state file; unknown tags fall through to the generic dump
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # file records: NUL-separated fields; v2 adds the other-side
                # node before the flags
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # per-file extras: alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types keep the fixed 'LOml' order; the rest sort by
        # their payload afterwards
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1626 1626
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # 'branches' is skipped here because historically only *open* branches
    # were completed; those are collected separately below
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1646 1646
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker from precursor to successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1763 1763
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for ``path``,
        # restricted to dirstate entries whose state byte is in
        # ``acceptable`` (subset of 'n', 'm', 'a', 'r').
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # Make the spec repo-relative with '/' separators, the form the
        # dirstate stores its keys in.
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop visits every dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    # Present results with the OS path separator.
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # Without --full, stop at the next path segment and
                    # offer the containing directory instead.
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted-state filter from the flags; empty means "all".
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1828 1828
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; the log lines only surface when
    # --debug is in effect.
    loggingoverrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(loggingoverrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1847 1847
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool is the highest-priority input; emulate it via ui.forcemerge.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v, echo the pre-pattern inputs that can decide the tool.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Swallow _picktool()'s own output unless --debug is set,
                # so the "FILE = TOOL" lines stay clean.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1926 1926
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for name, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(name),
                                   stringutil.escapestr(value)))
    else:
        # Update mode: compare-and-set the key via the pushkey command.
        key, old, new = keyinfo
        with peer.commandexecutor() as executor:
            result = executor.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(result) + '\n')
        return not result
1954 1954
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvec) of two revisions and print their
    # depths, hamming delta, distance and mutual relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): if none of the four comparisons above holds, ``rel``
    # is left unbound and the final write raises NameError -- presumably
    # pvec guarantees these cases are exhaustive; confirm against pvec.py.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1975 1975
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None tells rebuild() to reset status for every file.
        changedfiles = None
        if opts.get(r'minimal'):
            # Restrict the rebuild to files that disagree between the
            # dirstate and the target manifest (see command doc above).
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            missingfromds = inmanifest - indirstate
            onlyinds = indirstate - inmanifest
            notadded = set(f for f in onlyinds if ds[f] != 'a')
            changedfiles = missingfromds | notadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2013 2013
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All the actual work is delegated to repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2018 2018
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() gives (source path, source filenode) or False/None.
        renameinfo = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(path)
        if not renameinfo:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = renameinfo
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, srcpath, hex(srcnode)))
2036 2036
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: one raw index row per revision, then exit.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # No delta parent: the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            # ts accumulates raw sizes so far; heads tracks current DAG heads.
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Statistics mode: decode the version/flag word first.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total].
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold ``size`` into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # Single pass over all revisions, classifying each stored entry.
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as full text (or empty): the chain starts here.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Stored as a delta: extend the delta-parent's chain.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Regular delta: classify by which revision it is against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the chunk identifies the compression engine.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derive totals and turn accumulator totals into averages in place.
    # NOTE(review): ``datasize[2] /= numrevs`` and ``fullsize[2] /= numfull``
    # are unguarded; a revlog with no revisions or no full revisions would
    # raise ZeroDivisionError here -- confirm whether openrevlog() rules
    # those cases out.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: dfmtstr pads plain counts, pcfmtstr pads a
    # count followed by its percentage of a total.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    # Revision count / storage-kind breakdown.
    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label; printable ASCII types also show the char.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    # Per-compression-engine chunk statistics.
    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    # Delta-chain summary statistics.
    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        # min/max/avg sizes are only meaningful for v1+ revlogs (rawsize
        # was only collected when format > 0 above).
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        # Breakdown of which base each regular delta was computed against.
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
2336 2336
@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('', 'show-revs', True, _('print list of result revisions (default)')),
          ('s', 'show-set', None, _('print internal representation of result set')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
         ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # The revset compilation pipeline, in order: each stage takes the tree
    # produced by the previous one.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # Drop the final (optimizing) stage.
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # Decide which stages' trees get printed: showalways unconditionally,
    # showchanged only when the stage actually modified the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, remembering each stage's tree for later comparison.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and optimized trees and show a unified
        # diff of the resulting revision lists if they disagree.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2439 2439
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts['logiofd']:
        fd = int(opts['logiofd'])
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(fd, r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    wireprotoserver.sshserver(ui, repo, logfh=logfh).serve_forever()
2476 2476
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions before taking the lock; an omitted second
    # revision means the null changeset.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(parent1, parent2)
2494 2494
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        # Fall back to the repository's default path.
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # Verification is deliberately disabled here: we only need the peer's
    # certificate bytes so win32 can inspect/build the chain.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        # True -> return the certificate in binary (DER) form.
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass only checks (build=False); the second call below lets
        # Windows fetch missing chain elements (see command doc).
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2556 2556
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state recorded in the given changeset, one entry per
    # subrepo path, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2567 2567
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so work done for one revision can
    # be reused for the next.
    cache = {}
    closest = opts[r'closest']
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=closest,
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(short(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(short(node))
            ui.write('\n')
2620 2620
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions into template properties.
    props = {}
    for definition in opts[r'define']:
        try:
            key, value = [e.strip() for e in definition.split('=', 1)]
            if not key or key == 'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s')
                              % definition)

    if ui.verbose:
        # Show the raw parse tree and, if alias expansion changes it, the
        # expanded form as well.
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        aliases = ui.configitems('templatealias')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    def showsymbols(t):
        # List keywords/functions the template references by default.
        kwds, funcs = t.symbolsuseddefault()
        ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
        ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2677 2677
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Echo what getpass() returned so tests can observe it. The label was
    # previously misspelled 'respose'; fixed to match debuguiprompt's output.
    ui.write(('response: %s\n') % r)
2685 2685
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the answer so the interaction is visible in test output.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2693 2693
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock while the
    # caches are rebuilt.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2699 2699
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin CLI wrapper: all of the analysis/upgrade logic lives in the
    # upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2724 2724
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(matcher), '\n')
    files = list(wctx.walk(matcher))
    if not files:
        return
    # Normalize displayed path separators when ui.slash is set and the
    # platform separator is not already '/'.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths sized to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(name) for name in files),
        max(len(matcher.rel(name)) for name in files))
    for name in files:
        line = fmt % (name, display(matcher.rel(name)),
                      'exact' if matcher.exact(name) else '')
        ui.write("%s\n" % line.rstrip())
2745 2745
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get('divergentnodes')
        if divergent:
            # Trailing space separates the node list from the reason text.
            dnodes = ' '.join('%s (%s)' % (c.hex(), c.phasestr())
                              for c in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2756 2756
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command-specific arguments
    # should be forwarded over the wire.
    for optspec in cmdutil.remoteopts:
        del opts[optspec[1]]
    # Forward only the options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2780 2780
2781 2781 def _parsewirelangblocks(fh):
2782 2782 activeaction = None
2783 2783 blocklines = []
2784 2784
2785 2785 for line in fh:
2786 2786 line = line.rstrip()
2787 2787 if not line:
2788 2788 continue
2789 2789
2790 2790 if line.startswith(b'#'):
2791 2791 continue
2792 2792
2793 2793 if not line.startswith(b' '):
2794 2794 # New block. Flush previous one.
2795 2795 if activeaction:
2796 2796 yield activeaction, blocklines
2797 2797
2798 2798 activeaction = line
2799 2799 blocklines = []
2800 2800 continue
2801 2801
2802 2802 # Else we start with an indent.
2803 2803
2804 2804 if not activeaction:
2805 2805 raise error.Abort(_('indented line outside of block'))
2806 2806
2807 2807 blocklines.append(line)
2808 2808
2809 2809 # Flush last block.
2810 2810 if activeaction:
2811 2811 yield activeaction, blocklines
2812 2812
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        # Fix: the hint previously omitted "http2" even though the check
        # above accepts it.
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fix: split() returns a list; the file path is the
                    # second element. Previously the whole list was passed
                    # to open(), which raised TypeError.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now