# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
     ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
     ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string-valued elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if len(ps) > 1:
                    if not p2:
                        p2 = repo[ps[1]]
                    for fn in p2:
                        if fn.startswith("nf"):
                            files.append(fn)
                            filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
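
# Illustrative usage sketch for debugbuilddag, built only from the DAG
# elements documented in its docstring above; the tag name "mainline" and the
# branch name "feature" are invented for the example.
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+3:mainline @feature *mainline +2 /mainline'
#
# This should create three linear changesets, tag the last one "mainline",
# switch subsequent nodes to the named branch "feature", fork a changeset off
# "mainline", add two more on top, and finally merge that head back with
# "mainline".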

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
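
# Illustrative invocations of debugbundle, assuming a bundle file created
# beforehand with `hg bundle` (the file name is arbitrary):
#
#   $ hg debugbundle --spec changesets.hg    # print only the bundlespec
#   $ hg debugbundle --all changesets.hg     # per-delta detail via --all
#
# Bundle2 files are dispatched to _debugbundle2() above, which prints the
# stream parameters and one line per bundle part.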

@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # use a distinct name so we do not shadow the 'error' module above
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)

@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
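
# Note (illustrative): debugdag emits the textual DAG language handled by the
# dagparser module -- the same syntax that debugbuilddag documents and
# consumes above -- so, with suitable options, a small repository described
# here could be replayed into an empty repo with debugbuilddag.
#
#   $ hg debugdag -t -b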

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
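
# Illustrative usage of debugdate: the two output lines come straight from
# the writes above -- "internal" is the (unixtime, tzoffset) pair returned by
# dateutil.parsedate(), "standard" is the same value rendered by
# dateutil.datestr(). Expected output is sketched, not guaranteed verbatim.
#
#   $ hg debugdate '2006-02-01 13:00:30 +0000'
#   internal: 1138798830 0
#   standard: Wed Feb 01 13:00:30 2006 +0000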

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
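
# Illustrative template usage for debugdeltachain: every keyword below is one
# documented in the docstring above, and -T/--template comes from the
# standard formatter options pulled in via cmdutil.formatteropts.
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
# This prints one line per manifest revision instead of the fixed-width
# table, which is convenient to pipe into sort/awk when hunting long chains.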

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
     ('', 'dates', True, _('display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
     ('', 'seed', '12323', 'specify the random seed used for discovery'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if opts.get('old'):
        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data['elapsed'] = t.elapsed
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
    ui.write(("elapsed time:  %(elapsed)f seconds\n") % data)
    ui.write(("heads summary:\n"))
    ui.write(("  total common heads:  %(nb-common)9d\n") % data)
    ui.write(("    also local heads:  %(nb-common-local)9d\n") % data)
    ui.write(("    also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write(("  local heads:         %(nb-local)9d\n") % data)
    ui.write(("    common:            %(nb-common-local)9d\n") % data)
    ui.write(("    missing:           %(nb-local-missing)9d\n") % data)
    ui.write(("  remote heads:        %(nb-remote)9d\n") % data)
    ui.write(("    common:            %(nb-common-remote)9d\n") % data)
    ui.write(("    unknown:           %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets:      %(nb-revs)9d\n") % data)
    ui.write(("  common:              %(nb-revs-common)9d\n") % data)
    ui.write(("  missing:             %(nb-revs-missing)9d\n") % data)
    if ui.verbose:
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
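
# Illustrative usage of debugfileset, using only predicates that exist in the
# fileset language and the stage names defined in the stages list above:
#
#   $ hg debugfileset -r . -p analyzed -p optimized 'added() or modified()'
#
# This prints the fileset tree after the "analyzed" and "optimized" stages
# and then the files of that revision which match the expression.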

@command('debugformat',
    [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing directory %s\n")
                             % (uipathfn(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % uipathfn(f))
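
# Illustrative invocations of debugignore, matching the two modes described
# in the docstring above (the file name is arbitrary):
#
#   $ hg debugignore                     # dump the combined ignore pattern
#   $ hg debugignore build/output.o      # explain whether one path is ignored
#
# The second form reports either "... is ignored" plus the ignore rule, file
# and line number that matched, or "... is not ignored".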

@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()

@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write('%s: %d\n' % (k, v))
r32375 | @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True) | ||
Pierre-Yves David
|
r30918 | def debuginstall(ui, **opts): | ||
'''test Mercurial installation | ||||
Returns 0 on success. | ||||
''' | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
Pierre-Yves David
|
r30918 | |||
problems = 0 | ||||
fm = ui.formatter('debuginstall', opts) | ||||
fm.startitem() | ||||
# encoding | ||||
fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding) | ||||
err = None | ||||
try: | ||||
Yuya Nishihara
|
r34131 | codecs.lookup(pycompat.sysstr(encoding.encoding)) | ||
except LookupError as inst: | ||||
Yuya Nishihara
|
r37102 | err = stringutil.forcebytestr(inst) | ||
Pierre-Yves David
|
r30918 | problems += 1 | ||
fm.condwrite(err, 'encodingerror', _(" %s\n" | ||||
" (check that your locale is properly set)\n"), err) | ||||
# Python | ||||
fm.write('pythonexe', _("checking Python executable (%s)\n"), | ||||
pycompat.sysexecutable) | ||||
fm.write('pythonver', _("checking Python version (%s)\n"), | ||||
("%d.%d.%d" % sys.version_info[:3])) | ||||
fm.write('pythonlib', _("checking Python lib (%s)...\n"), | ||||
os.path.dirname(pycompat.fsencode(os.__file__))) | ||||
security = set(sslutil.supportedprotocols) | ||||
if sslutil.hassni: | ||||
security.add('sni') | ||||
fm.write('pythonsecurity', _("checking Python security support (%s)\n"), | ||||
fm.formatlist(sorted(security), name='protocol', | ||||
fmt='%s', sep=',')) | ||||
# These are warnings, not errors. So don't increment problem count. This | ||||
# may change in the future. | ||||
if 'tls1.2' not in security: | ||||
fm.plain(_(' TLS 1.2 not supported by Python install; ' | ||||
'network connections lack modern security\n')) | ||||
if 'sni' not in security: | ||||
fm.plain(_(' SNI not supported by Python install; may have ' | ||||
'connectivity issues with some servers\n')) | ||||
# TODO print CA cert info | ||||
# hg version | ||||
hgver = util.version() | ||||
fm.write('hgver', _("checking Mercurial version (%s)\n"), | ||||
hgver.split('+')[0]) | ||||
fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"), | ||||
'+'.join(hgver.split('+')[1:])) | ||||
# compiled modules | ||||
fm.write('hgmodulepolicy', _("checking module policy (%s)\n"), | ||||
policy.policy) | ||||
fm.write('hgmodules', _("checking installed modules (%s)...\n"), | ||||
Pulkit Goyal
|
r31074 | os.path.dirname(pycompat.fsencode(__file__))) | ||
Pierre-Yves David
|
r30918 | |||
Yuya Nishihara
|
r32204 | if policy.policy in ('c', 'allow'): | ||
err = None | ||||
try: | ||||
Yuya Nishihara
|
r32367 | from .cext import ( | ||
Yuya Nishihara
|
r32368 | base85, | ||
Yuya Nishihara
|
r32369 | bdiff, | ||
Yuya Nishihara
|
r32371 | mpatch, | ||
Yuya Nishihara
|
r32204 | osutil, | ||
) | ||||
dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes | ||||
except Exception as inst: | ||||
Yuya Nishihara
|
r37102 | err = stringutil.forcebytestr(inst) | ||
Yuya Nishihara
|
r32204 | problems += 1 | ||
fm.condwrite(err, 'extensionserror', " %s\n", err) | ||||
Pierre-Yves David
|
r30918 | |||
compengines = util.compengines._engines.values() | ||||
fm.write('compengines', _('checking registered compression engines (%s)\n'), | ||||
fm.formatlist(sorted(e.name() for e in compengines), | ||||
name='compengine', fmt='%s', sep=', ')) | ||||
fm.write('compenginesavail', _('checking available compression engines ' | ||||
'(%s)\n'), | ||||
fm.formatlist(sorted(e.name() for e in compengines | ||||
if e.available()), | ||||
name='compengine', fmt='%s', sep=', ')) | ||||
r42208 | wirecompengines = compression.compengines.supportedwireengines( | |||
compression.SERVERROLE) | ||||
Pierre-Yves David
|
r30918 | fm.write('compenginesserver', _('checking available compression engines ' | ||
'for wire protocol (%s)\n'), | ||||
fm.formatlist([e.name() for e in wirecompengines | ||||
if e.wireprotosupport()], | ||||
name='compengine', fmt='%s', sep=', ')) | ||||
Boris Feld
|
r35464 | re2 = 'missing' | ||
if util._re2: | ||||
re2 = 'available' | ||||
fm.plain(_('checking "re2" regexp engine (%s)\n') % re2) | ||||
fm.data(re2=bool(util._re2)) | ||||
Pierre-Yves David
|
r30918 | |||
# templates | ||||
p = templater.templatepaths() | ||||
fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p)) | ||||
fm.condwrite(not p, '', _(" no template directories found\n")) | ||||
if p: | ||||
m = templater.templatepath("map-cmdline.default") | ||||
if m: | ||||
# template found, check if it is working | ||||
err = None | ||||
try: | ||||
templater.templater.frommapfile(m) | ||||
except Exception as inst: | ||||
Yuya Nishihara
|
r37102 | err = stringutil.forcebytestr(inst) | ||
Pierre-Yves David
|
r30918 | p = None | ||
fm.condwrite(err, 'defaulttemplateerror', " %s\n", err) | ||||
else: | ||||
p = None | ||||
fm.condwrite(p, 'defaulttemplate', | ||||
_("checking default template (%s)\n"), m) | ||||
fm.condwrite(not m, 'defaulttemplatenotfound', | ||||
_(" template '%s' not found\n"), "default") | ||||
if not p: | ||||
problems += 1 | ||||
fm.condwrite(not p, '', | ||||
_(" (templates seem to have been installed incorrectly)\n")) | ||||
# editor | ||||
editor = ui.geteditor() | ||||
editor = util.expandpath(editor) | ||||
Yuya Nishihara
|
r37138 | editorbin = procutil.shellsplit(editor)[0] | ||
Kyle Lippincott
|
r36254 | fm.write('editor', _("checking commit editor... (%s)\n"), editorbin) | ||
Yuya Nishihara
|
r37138 | cmdpath = procutil.findexe(editorbin) | ||
fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
_(" No commit editor set and can't find %s in PATH\n"
" (specify a commit editor in your configuration"
" file)\n"), not cmdpath and editor == 'vi' and editorbin)
fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
_(" Can't find editor '%s' in PATH\n"
" (specify a commit editor in your configuration"
" file)\n"), not cmdpath and editorbin)
if not cmdpath and editor != 'vi':
problems += 1 | ||||
# check username | ||||
username = None | ||||
err = None | ||||
try: | ||||
username = ui.username() | ||||
except error.Abort as e: | ||||
err = stringutil.forcebytestr(e)
problems += 1
fm.condwrite(username, 'username', _("checking username (%s)\n"), username) | ||||
fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n" | ||||
" (specify a username in your configuration file)\n"), err) | ||||
fm.condwrite(not problems, '', | ||||
_("no problems detected\n")) | ||||
if not problems: | ||||
fm.data(problems=problems) | ||||
fm.condwrite(problems, 'problems', | ||||
_("%d problems detected," | ||||
" please check your install!\n"), problems) | ||||
fm.end() | ||||
return problems | ||||
Pierre-Yves David
|
r30919 | @command('debugknown', [], _('REPO ID...'), norepo=True) | ||
def debugknown(ui, repopath, *ids, **opts): | ||||
"""test whether node ids are known to a repo | ||||
Every ID must be a full-length hex node id string. Returns a list of 0s | ||||
and 1s indicating unknown/known. | ||||
""" | ||||
opts = pycompat.byteskwargs(opts)
repo = hg.peer(ui, opts, repopath)
if not repo.capable('known'): | ||||
raise error.Abort("known() not supported by target repository") | ||||
flags = repo.known([bin(s) for s in ids]) | ||||
ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags]))) | ||||
Pierre-Yves David
|
r30935 | @command('debuglabelcomplete', [], _('LABEL...')) | ||
def debuglabelcomplete(ui, repo, *args): | ||||
'''backwards compatibility with old bash completion scripts (DEPRECATED)''' | ||||
debugnamecomplete(ui, repo, *args)
Pierre-Yves David
|
r30935 | |||
Pierre-Yves David
|
r30938 | @command('debuglocks', | ||
[('L', 'force-lock', None, _('free the store lock (DANGEROUS)')), | ||||
('W', 'force-wlock', None, | ||||
_('free the working state lock (DANGEROUS)')),
('s', 'set-lock', None, _('set the store lock until stopped')), | ||||
('S', 'set-wlock', None, | ||||
_('set the working state lock until stopped'))], | ||||
_('[OPTION]...'))
def debuglocks(ui, repo, **opts): | ||||
"""show or modify state of locks | ||||
By default, this command will show which locks are held. This | ||||
includes the user and process holding the lock, the amount of time | ||||
the lock has been held, and the machine name where the process is | ||||
running if it's not local. | ||||
Locks protect the integrity of Mercurial's data, so should be | ||||
treated with care. System crashes or other interruptions may cause | ||||
locks to not be properly released, though Mercurial will usually | ||||
detect and remove such stale locks automatically. | ||||
However, detecting stale locks may not always be possible (for | ||||
instance, on a shared filesystem). Removing locks may also be | ||||
blocked by filesystem permissions. | ||||
Setting a lock will prevent other commands from changing the data.
The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs. | ||||
The set locks are removed when the command exits. | ||||
Returns 0 if no locks are held.
""" | ||||
if opts.get(r'force_lock'):
repo.svfs.unlink('lock')
if opts.get(r'force_wlock'):
repo.vfs.unlink('wlock')
if opts.get(r'force_lock') or opts.get(r'force_wlock'):
return 0
locks = []
try: | ||||
if opts.get(r'set_wlock'): | ||||
try: | ||||
locks.append(repo.wlock(False)) | ||||
except error.LockHeld: | ||||
raise error.Abort(_('wlock is already held')) | ||||
if opts.get(r'set_lock'): | ||||
try: | ||||
locks.append(repo.lock(False)) | ||||
except error.LockHeld: | ||||
raise error.Abort(_('lock is already held')) | ||||
if len(locks): | ||||
ui.promptchoice(_("ready to release the lock (y)? $$ &Yes")) | ||||
return 0 | ||||
finally: | ||||
release(*locks) | ||||
now = time.time()
held = 0 | ||||
def report(vfs, name, method): | ||||
# this causes stale locks to get reaped for more accurate reporting | ||||
try: | ||||
l = method(False) | ||||
except error.LockHeld: | ||||
l = None | ||||
if l: | ||||
l.release() | ||||
else: | ||||
try: | ||||
st = vfs.lstat(name)
age = now - st[stat.ST_MTIME]
user = util.username(st.st_uid)
locker = vfs.readlock(name)
if ":" in locker: | ||||
host, pid = locker.split(':') | ||||
if host == socket.gethostname(): | ||||
locker = 'user %s, process %s' % (user or b'None', pid)
else:
locker = ('user %s, process %s, host %s'
% (user or b'None', pid, host))
ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
return 1 | ||||
except OSError as e: | ||||
if e.errno != errno.ENOENT: | ||||
raise | ||||
ui.write(("%-6s free\n") % (name + ":")) | ||||
return 0 | ||||
held += report(repo.svfs, "lock", repo.lock) | ||||
held += report(repo.vfs, "wlock", repo.wlock) | ||||
return held | ||||
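# Hypothetical sample of the default (read-only) output of `hg debuglocks`;
# the user, pid and age below are invented:
#
#   lock:  user alice, process 12345 (17s)
#   wlock: free
#
# The return value is the number of locks currently held (here, 1).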
Martijn Pieters
|
r38803 | @command('debugmanifestfulltextcache', [ | ||
('', 'clear', False, _('clear the cache')), | ||||
('a', 'add', [], _('add the given manifest nodes to the cache'),
_('NODE'))
], '')
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
Martijn Pieters
|
r38803 | """show, clear or amend the contents of the manifest fulltext cache""" | ||
def getcache():
r = repo.manifestlog.getstorage(b'')
try:
return r._fulltextcache
except AttributeError:
msg = _("Current revlog implementation doesn't appear to have a "
"manifest fulltext cache\n")
raise error.Abort(msg)
if opts.get(r'clear'):
with repo.wlock():
cache = getcache()
cache.clear(clear_persisted_data=True)
return | ||||
Martijn Pieters
|
r38803 | |||
if add:
with repo.wlock():
m = repo.manifestlog
store = m.getstorage(b'')
for n in add:
try:
manifest = m[store.lookup(n)]
except error.LookupError as e:
raise error.Abort(e, hint="Check your manifest node id")
manifest.read() # stores revision in cache too
return
Martijn Pieters
|
r38803 | |||
r42108 | cache = getcache() | |||
if not len(cache): | ||||
ui.write(_('cache empty\n')) | ||||
else: | ||||
ui.write( | ||||
_('cache contains %d manifest entries, in order of most to ' | ||||
'least recent:\n') % (len(cache),)) | ||||
totalsize = 0 | ||||
for nodeid in cache: | ||||
# Use cache.peek to not update the LRU order
data = cache.peek(nodeid)
size = len(data)
totalsize += size + 24 # 20 bytes nodeid, 4 bytes size | ||||
ui.write(_('id: %s, size %s\n') % ( | ||||
hex(nodeid), util.bytecount(size))) | ||||
ondisk = cache._opener.stat('manifestfulltextcache').st_size | ||||
ui.write( | ||||
_('total cache data size %s, on-disk %s\n') % ( | ||||
util.bytecount(totalsize), util.bytecount(ondisk)) | ||||
) | ||||
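# Hypothetical sample output of `hg debugmanifestfulltextcache` (node ids
# and sizes invented), matching the format strings used above:
#
#   cache contains 2 manifest entries, in order of most to least recent:
#   id: 1234567890abcdef1234567890abcdef12345678, size 1.20 KB
#   id: abcdef1234567890abcdef1234567890abcdef12, size 980 bytes
#   total cache data size 2.21 KB, on-disk 2.21 KB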
Martijn Pieters
|
r38803 | |||
Pierre-Yves David
|
r30936 | @command('debugmergestate', [], '') | ||
def debugmergestate(ui, repo, *args): | ||||
"""print merge state | ||||
Use --verbose to print out information about whether v1 or v2 merge state | ||||
was chosen.""" | ||||
def _hashornull(h): | ||||
if h == nullhex: | ||||
return 'null' | ||||
else: | ||||
return h | ||||
def printrecords(version): | ||||
ui.write(('* version %d records\n') % version)
Pierre-Yves David
|
r30936 | if version == 1: | ||
records = v1records | ||||
else: | ||||
records = v2records | ||||
for rtype, record in records: | ||||
# pretty print some record types | ||||
if rtype == 'L': | ||||
ui.write(('local: %s\n') % record) | ||||
elif rtype == 'O': | ||||
ui.write(('other: %s\n') % record) | ||||
elif rtype == 'm': | ||||
driver, mdstate = record.split('\0', 1) | ||||
ui.write(('merge driver: %s (state "%s")\n') | ||||
% (driver, mdstate)) | ||||
elif rtype in 'FDC': | ||||
r = record.split('\0') | ||||
f, state, hash, lfile, afile, anode, ofile = r[0:7] | ||||
if version == 1: | ||||
onode = 'not stored in v1 format' | ||||
flags = r[7] | ||||
else: | ||||
onode, flags = r[7:9] | ||||
ui.write(('file: %s (record type "%s", state "%s", hash %s)\n') | ||||
% (f, rtype, state, _hashornull(hash))) | ||||
ui.write((' local path: %s (flags "%s")\n') % (lfile, flags)) | ||||
ui.write((' ancestor path: %s (node %s)\n') | ||||
% (afile, _hashornull(anode))) | ||||
ui.write((' other path: %s (node %s)\n') | ||||
% (ofile, _hashornull(onode))) | ||||
elif rtype == 'f': | ||||
filename, rawextras = record.split('\0', 1) | ||||
extras = rawextras.split('\0') | ||||
i = 0 | ||||
extrastrings = [] | ||||
while i < len(extras): | ||||
extrastrings.append('%s = %s' % (extras[i], extras[i + 1])) | ||||
i += 2 | ||||
ui.write(('file extras: %s (%s)\n') | ||||
% (filename, ', '.join(extrastrings))) | ||||
elif rtype == 'l': | ||||
labels = record.split('\0', 2) | ||||
labels = [l for l in labels if len(l) > 0] | ||||
ui.write(('labels:\n')) | ||||
ui.write((' local: %s\n' % labels[0])) | ||||
ui.write((' other: %s\n' % labels[1])) | ||||
if len(labels) > 2: | ||||
ui.write((' base: %s\n' % labels[2])) | ||||
else: | ||||
ui.write(('unrecognized entry: %s\t%s\n') | ||||
% (rtype, record.replace('\0', '\t'))) | ||||
# Avoid mergestate.read() since it may raise an exception for unsupported | ||||
# merge state records. We shouldn't be doing this, but this is OK since this | ||||
# command is pretty low-level. | ||||
ms = mergemod.mergestate(repo) | ||||
# sort so that reasonable information is on top | ||||
v1records = ms._readrecordsv1() | ||||
v2records = ms._readrecordsv2() | ||||
order = 'LOml' | ||||
def key(r): | ||||
idx = order.find(r[0]) | ||||
if idx == -1: | ||||
return (1, r[1]) | ||||
else: | ||||
return (0, idx) | ||||
v1records.sort(key=key) | ||||
v2records.sort(key=key) | ||||
if not v1records and not v2records: | ||||
ui.write(('no merge state found\n')) | ||||
elif not v2records: | ||||
ui.note(('no version 2 merge state\n')) | ||||
printrecords(1) | ||||
elif ms._v1v2match(v1records, v2records): | ||||
ui.note(('v1 and v2 states match: using v2\n')) | ||||
printrecords(2) | ||||
else: | ||||
ui.note(('v1 and v2 states mismatch: using v1\n')) | ||||
printrecords(1) | ||||
if ui.verbose: | ||||
printrecords(2) | ||||
Pierre-Yves David
|
r30937 | @command('debugnamecomplete', [], _('NAME...')) | ||
def debugnamecomplete(ui, repo, *args): | ||||
'''complete "names" - tags, open branch names, bookmark names''' | ||||
names = set() | ||||
# since we previously only listed open branches, we will handle that | ||||
# specially (after this for loop) | ||||
for name, ns in repo.names.iteritems(): | ||||
if name != 'branches': | ||||
names.update(ns.listnames(repo)) | ||||
names.update(tag for (tag, heads, tip, closed) | ||||
in repo.branchmap().iterbranches() if not closed) | ||||
completions = set() | ||||
if not args: | ||||
args = [''] | ||||
for a in args: | ||||
completions.update(n for n in names if n.startswith(a)) | ||||
ui.write('\n'.join(sorted(completions))) | ||||
ui.write('\n') | ||||
Pierre-Yves David
|
r30939 | @command('debugobsolete', | ||
[('', 'flags', 0, _('markers flag')), | ||||
('', 'record-parents', False, | ||||
_('record parent information for the precursor')), | ||||
('r', 'rev', [], _('display markers relevant to REV')), | ||||
('', 'exclusive', False, _('restrict display to markers only '
'relevant to REV')),
('', 'index', False, _('display index of the marker')),
('', 'delete', [], _('delete markers specified by indices')), | ||||
Yuya Nishihara
|
r32375 | ] + cmdutil.commitopts2 + cmdutil.formatteropts, | ||
Pierre-Yves David
|
r30939 | _('[OBSOLETED [REPLACEMENT ...]]')) | ||
def debugobsolete(ui, repo, precursor=None, *successors, **opts): | ||||
"""create arbitrary obsolete marker | ||||
With no arguments, displays the list of obsolescence markers.""" | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
Pierre-Yves David
|
r30939 | def parsenodeid(s): | ||
try: | ||||
# We do not use revsingle/revrange functions here to accept | ||||
# arbitrary node identifiers, possibly not present in the | ||||
# local repository. | ||||
n = bin(s) | ||||
if len(n) != len(nullid): | ||||
raise TypeError() | ||||
return n | ||||
except TypeError: | ||||
raise error.Abort('changeset references must be full hexadecimal ' | ||||
'node identifiers') | ||||
if opts.get('delete'): | ||||
indices = [] | ||||
for v in opts.get('delete'): | ||||
try: | ||||
indices.append(int(v)) | ||||
except ValueError: | ||||
raise error.Abort(_('invalid index value: %r') % v, | ||||
hint=_('use integers for indices')) | ||||
if repo.currenttransaction(): | ||||
raise error.Abort(_('cannot delete obsmarkers in the middle ' | ||||
'of transaction.')) | ||||
with repo.lock(): | ||||
n = repair.deleteobsmarkers(repo.obsstore, indices) | ||||
ui.write(_('deleted %i obsolescence markers\n') % n) | ||||
return | ||||
if precursor is not None: | ||||
if opts['rev']: | ||||
raise error.Abort('cannot select revision when creating marker') | ||||
metadata = {} | ||||
Yuya Nishihara
|
r38729 | metadata['user'] = encoding.fromlocal(opts['user'] or ui.username()) | ||
Pierre-Yves David
|
r30939 | succs = tuple(parsenodeid(succ) for succ in successors) | ||
l = repo.lock() | ||||
try: | ||||
tr = repo.transaction('debugobsolete') | ||||
try: | ||||
date = opts.get('date') | ||||
if date: | ||||
date = dateutil.parsedate(date)
Pierre-Yves David
|
r30939 | else: | ||
date = None | ||||
prec = parsenodeid(precursor) | ||||
parents = None | ||||
if opts['record_parents']: | ||||
if prec not in repo.unfiltered(): | ||||
raise error.Abort('cannot use --record-parents on '
'unknown changesets')
parents = repo.unfiltered()[prec].parents() | ||||
parents = tuple(p.node() for p in parents) | ||||
repo.obsstore.create(tr, prec, succs, opts['flags'], | ||||
parents=parents, date=date, | ||||
Boris Feld
|
r32411 | metadata=metadata, ui=ui) | ||
Pierre-Yves David
|
r30939 | tr.close() | ||
except ValueError as exc: | ||||
Pulkit Goyal
|
r36510 | raise error.Abort(_('bad obsmarker input: %s') % | ||
pycompat.bytestr(exc)) | ||||
Pierre-Yves David
|
r30939 | finally: | ||
tr.release() | ||||
finally: | ||||
l.release() | ||||
else: | ||||
if opts['rev']: | ||||
revs = scmutil.revrange(repo, opts['rev']) | ||||
nodes = [repo[r].node() for r in revs] | ||||
markers = list(obsutil.getmarkers(repo, nodes=nodes,
exclusive=opts['exclusive']))
Pierre-Yves David
|
r30939 | markers.sort(key=lambda x: x._data) | ||
else: | ||||
r33150 | markers = obsutil.getmarkers(repo) | |||
Pierre-Yves David
|
r30939 | |||
markerstoiter = markers | ||||
isrelevant = lambda m: True | ||||
if opts.get('rev') and opts.get('index'): | ||||
r33150 | markerstoiter = obsutil.getmarkers(repo) | |||
Pierre-Yves David
|
r30939 | markerset = set(markers) | ||
isrelevant = lambda m: m in markerset | ||||
fm = ui.formatter('debugobsolete', opts) | ||||
for i, m in enumerate(markerstoiter): | ||||
if not isrelevant(m): | ||||
# marker can be irrelevant when we're iterating over a set | ||||
# of markers (markerstoiter) which is bigger than the set | ||||
# of markers we want to display (markers) | ||||
# this can happen if both --index and --rev options are | ||||
# provided and thus we need to iterate over all of the markers | ||||
# to get the correct indices, but only display the ones that | ||||
# are relevant to --rev value | ||||
continue | ||||
fm.startitem() | ||||
ind = i if opts.get('index') else None | ||||
cmdutil.showmarker(fm, m, index=ind) | ||||
fm.end() | ||||
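# Hypothetical usage sketch (node ids below are invented; every id must be a
# full 40-character hex node): record that one changeset was rewritten as
# another, then list the markers.
#
#   $ hg debugobsolete 5c095ad7e90f871700f02dd1fa5012cb4498a2d4 \
#         8b4e272c1a6cbeecc0b8eff6b0f0d0fbbe161de4
#   $ hg debugobsolete          # with no arguments, list existing markers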
Martin von Zweigbergk
|
r41921 | @command('debugp1copies', | ||
[('r', 'rev', '', _('revision to debug'), _('REV'))], | ||||
_('[-r REV]')) | ||||
def debugp1copies(ui, repo, **opts): | ||||
"""dump copy information compared to p1""" | ||||
opts = pycompat.byteskwargs(opts) | ||||
ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) | ||||
for dst, src in ctx.p1copies().items(): | ||||
ui.write('%s -> %s\n' % (src, dst)) | ||||
@command('debugp2copies', | ||||
[('r', 'rev', '', _('revision to debug'), _('REV'))], | ||||
_('[-r REV]')) | ||||
def debugp2copies(ui, repo, **opts):
"""dump copy information compared to p2""" | ||||
opts = pycompat.byteskwargs(opts) | ||||
ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) | ||||
for dst, src in ctx.p2copies().items(): | ||||
ui.write('%s -> %s\n' % (src, dst)) | ||||
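# Illustrative sketch (not part of Mercurial): the same p1 copy information
# gathered as a list of lines instead of being written to the ui. The helper
# name is hypothetical.
def _examplep1copies(ctx):
    """Return p1 copy information formatted the way debugp1copies prints it."""
    return ['%s -> %s' % (src, dst) for dst, src in ctx.p1copies().items()]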
Pierre-Yves David
|
r30940 | @command('debugpathcomplete', | ||
[('f', 'full', None, _('complete an entire path')), | ||||
('n', 'normal', None, _('show only normal files')), | ||||
('a', 'added', None, _('show only added files')), | ||||
('r', 'removed', None, _('show only removed files'))], | ||||
_('FILESPEC...')) | ||||
def debugpathcomplete(ui, repo, *specs, **opts): | ||||
'''complete part or all of a tracked path | ||||
This command supports shells that offer path name completion. It | ||||
currently completes only files already known to the dirstate. | ||||
Completion extends only to the next path segment unless | ||||
--full is specified, in which case entire paths are used.''' | ||||
def complete(path, acceptable): | ||||
dirstate = repo.dirstate | ||||
Matt Harbison
|
r39843 | spec = os.path.normpath(os.path.join(encoding.getcwd(), path)) | ||
Pierre-Yves David
|
r30940 | rootdir = repo.root + pycompat.ossep | ||
if spec != repo.root and not spec.startswith(rootdir): | ||||
return [], [] | ||||
if os.path.isdir(spec): | ||||
spec += '/' | ||||
spec = spec[len(rootdir):] | ||||
fixpaths = pycompat.ossep != '/' | ||||
if fixpaths: | ||||
spec = spec.replace(pycompat.ossep, '/') | ||||
speclen = len(spec) | ||||
Pulkit Goyal
|
r33101 | fullpaths = opts[r'full'] | ||
Pierre-Yves David
|
r30940 | files, dirs = set(), set() | ||
adddir, addfile = dirs.add, files.add | ||||
for f, st in dirstate.iteritems(): | ||||
if f.startswith(spec) and st[0] in acceptable: | ||||
if fixpaths: | ||||
f = f.replace('/', pycompat.ossep) | ||||
if fullpaths: | ||||
addfile(f) | ||||
continue | ||||
s = f.find(pycompat.ossep, speclen) | ||||
if s >= 0: | ||||
adddir(f[:s]) | ||||
else: | ||||
addfile(f) | ||||
return files, dirs | ||||
acceptable = '' | ||||
if opts[r'normal']:
acceptable += 'nm'
if opts[r'added']:
acceptable += 'a'
if opts[r'removed']:
acceptable += 'r'
cwd = repo.getcwd() | ||||
if not specs: | ||||
specs = ['.'] | ||||
files, dirs = set(), set() | ||||
for spec in specs: | ||||
f, d = complete(spec, acceptable or 'nmar') | ||||
files.update(f) | ||||
dirs.update(d) | ||||
files.update(dirs) | ||||
ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files))) | ||||
ui.write('\n') | ||||
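# Hypothetical example (paths invented). Without --full, completion stops at
# the next path segment; with --full, entire tracked paths are printed.
#
#   $ hg debugpathcomplete src
#   src/main
#   $ hg debugpathcomplete --full src
#   src/main/app.py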
Martin von Zweigbergk
|
r41656 | @command('debugpathcopies', | ||
cmdutil.walkopts, | ||||
Martin von Zweigbergk
|
r41840 | 'hg debugpathcopies REV1 REV2 [FILE]', | ||
Martin von Zweigbergk
|
r41656 | inferrepo=True) | ||
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts): | ||||
"""show copies between two revisions""" | ||||
ctx1 = scmutil.revsingle(repo, rev1) | ||||
ctx2 = scmutil.revsingle(repo, rev2) | ||||
m = scmutil.match(ctx1, pats, opts) | ||||
Martin von Zweigbergk
|
r41915 | for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()): | ||
Martin von Zweigbergk
|
r41656 | ui.write('%s -> %s\n' % (src, dst)) | ||
Gregory Szorc
|
r35947 | @command('debugpeer', [], _('PATH'), norepo=True) | ||
def debugpeer(ui, path): | ||||
"""establish a connection to a peer repository""" | ||||
# Always enable peer request logging. Requires --debug to display | ||||
# though. | ||||
overrides = { | ||||
('devel', 'debug.peer-request'): True, | ||||
} | ||||
with ui.configoverride(overrides): | ||||
peer = hg.peer(ui, {}, path) | ||||
local = peer.local() is not None | ||||
canpush = peer.canpush() | ||||
ui.write(_('url: %s\n') % peer.url()) | ||||
ui.write(_('local: %s\n') % (_('yes') if local else _('no'))) | ||||
ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no'))) | ||||
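# Hypothetical sample run against a local path (the values printed depend on
# the peer type):
#
#   $ hg debugpeer /path/to/repo
#   url: /path/to/repo
#   local: yes
#   pushable: yes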
FUJIWARA Katsunori
|
r32256 | @command('debugpickmergetool', | ||
[('r', 'rev', '', _('check for files in this revision'), _('REV')), | ||||
('', 'changedelete', None, _('emulate merging change and delete')), | ||||
Yuya Nishihara
|
r32375 | ] + cmdutil.walkopts + cmdutil.mergetoolopts, | ||
FUJIWARA Katsunori
|
r32256 | _('[PATTERN]...'), | ||
inferrepo=True) | ||||
def debugpickmergetool(ui, repo, *pats, **opts): | ||||
"""examine which merge tool is chosen for specified file | ||||
As described in :hg:`help merge-tools`, Mercurial examines | ||||
configurations below in this order to decide which merge tool is | ||||
chosen for specified file. | ||||
1. ``--tool`` option | ||||
2. ``HGMERGE`` environment variable | ||||
3. configurations in ``merge-patterns`` section | ||||
4. configuration of ``ui.merge`` | ||||
5. configurations in ``merge-tools`` section | ||||
6. ``hgmerge`` tool (for historical reason only) | ||||
7. default tool for fallback (``:merge`` or ``:prompt``) | ||||
This command writes out examination result in the style below:: | ||||
FILE = MERGETOOL | ||||
By default, all files known in the first parent context of the | ||||
working directory are examined. Use file patterns and/or -I/-X | ||||
options to limit target files. -r/--rev is also useful to examine
files in another context without actually updating to it.
With --debug, this command shows warning messages while matching | ||||
against ``merge-patterns`` and so on, too. It is recommended to | ||||
use this option with explicit file patterns and/or -I/-X options, | ||||
because this option increases the amount of output per file according
to the configurations in hgrc.
With -v/--verbose, this command shows configurations below at | ||||
first (only if specified). | ||||
- ``--tool`` option | ||||
- ``HGMERGE`` environment variable | ||||
- configuration of ``ui.merge`` | ||||
If a merge tool is chosen before matching against
``merge-patterns``, this command can't show any helpful
information, even with --debug. In such a case, the information above
is useful for understanding why a merge tool was chosen.
""" | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
FUJIWARA Katsunori
|
r32256 | overrides = {} | ||
if opts['tool']: | ||||
overrides[('ui', 'forcemerge')] = opts['tool'] | ||||
Augie Fackler
|
r36796 | ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool']))) | ||
FUJIWARA Katsunori
|
r32256 | |||
with ui.configoverride(overrides, 'debugmergepatterns'): | ||||
hgmerge = encoding.environ.get("HGMERGE") | ||||
if hgmerge is not None: | ||||
Augie Fackler
|
r36796 | ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge))) | ||
FUJIWARA Katsunori
|
r32256 | uimerge = ui.config("ui", "merge") | ||
if uimerge: | ||||
Augie Fackler
|
r36796 | ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge))) | ||
FUJIWARA Katsunori
|
r32256 | |||
ctx = scmutil.revsingle(repo, opts.get('rev')) | ||||
m = scmutil.match(ctx, pats, opts) | ||||
changedelete = opts['changedelete'] | ||||
for path in ctx.walk(m): | ||||
fctx = ctx[path] | ||||
try: | ||||
if not ui.debugflag: | ||||
ui.pushbuffer(error=True) | ||||
tool, toolpath = filemerge._picktool(repo, ui, path, | ||||
fctx.isbinary(), | ||||
'l' in fctx.flags(), | ||||
changedelete) | ||||
finally: | ||||
if not ui.debugflag: | ||||
ui.popbuffer() | ||||
ui.write(('%s = %s\n') % (path, tool)) | ||||
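# Hypothetical example (file and tool names invented), using the
# "FILE = MERGETOOL" output format described in the docstring:
#
#   $ hg debugpickmergetool --rev .
#   src/app.c = kdiff3
#   docs/notes.txt = :merge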
Pierre-Yves David
|
r30946 | @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True) | ||
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts): | ||||
'''access the pushkey key/value protocol | ||||
With two args, list the keys in the given namespace. | ||||
With five args, set a key to new if it currently is set to old. | ||||
Reports success or failure. | ||||
''' | ||||
target = hg.peer(ui, {}, repopath) | ||||
if keyinfo: | ||||
key, old, new = keyinfo | ||||
Gregory Szorc
|
r37665 | with target.commandexecutor() as e: | ||
r = e.callcommand('pushkey', { | ||||
'namespace': namespace, | ||||
'key': key, | ||||
'old': old, | ||||
'new': new, | ||||
}).result() | ||||
Pulkit Goyal
|
r36736 | ui.status(pycompat.bytestr(r) + '\n') | ||
Pierre-Yves David
|
r30946 | return not r | ||
else: | ||||
for k, v in sorted(target.listkeys(namespace).iteritems()): | ||||
Yuya Nishihara
|
r37102 | ui.write("%s\t%s\n" % (stringutil.escapestr(k), | ||
stringutil.escapestr(v))) | ||||
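# Hypothetical examples (URL, bookmark and node values invented): listing a
# namespace with two arguments, and conditionally updating a key with five.
#
#   $ hg debugpushkey https://example.org/repo bookmarks
#   $ hg debugpushkey https://example.org/repo bookmarks stable OLDNODE NEWNODE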
Pierre-Yves David
|
r30946 | |||
Pierre-Yves David
|
r30947 | @command('debugpvec', [], _('A B')) | ||
def debugpvec(ui, repo, a, b=None): | ||||
ca = scmutil.revsingle(repo, a) | ||||
cb = scmutil.revsingle(repo, b) | ||||
pa = pvec.ctxpvec(ca) | ||||
pb = pvec.ctxpvec(cb) | ||||
if pa == pb: | ||||
rel = "=" | ||||
elif pa > pb: | ||||
rel = ">" | ||||
elif pa < pb: | ||||
rel = "<" | ||||
elif pa | pb: | ||||
rel = "|" | ||||
ui.write(_("a: %s\n") % pa) | ||||
ui.write(_("b: %s\n") % pb) | ||||
ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth)) | ||||
ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") % | ||||
(abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec), | ||||
pa.distance(pb), rel)) | ||||
Pierre-Yves David
|
r30948 | @command('debugrebuilddirstate|debugrebuildstate', | ||
[('r', 'rev', '', _('revision to rebuild to'), _('REV')), | ||||
('', 'minimal', None, _('only rebuild files that are inconsistent with ' | ||||
'the working copy parent')), | ||||
], | ||||
_('[-r REV]')) | ||||
def debugrebuilddirstate(ui, repo, rev, **opts): | ||||
"""rebuild the dirstate as it would look like for the given revision | ||||
If no revision is specified, the first current parent will be used.
The dirstate will be set to the files of the given revision. | ||||
The actual working directory content or existing dirstate | ||||
information such as adds or removes is not considered. | ||||
``minimal`` will only rebuild the dirstate status for files that claim to be | ||||
tracked but are not in the parent manifest, or that exist in the parent | ||||
manifest but are not in the dirstate. It will not change adds, removes, or | ||||
modified files that are in the working copy parent. | ||||
One use of this command is to make the next :hg:`status` invocation | ||||
check the actual file content. | ||||
""" | ||||
ctx = scmutil.revsingle(repo, rev) | ||||
with repo.wlock(): | ||||
dirstate = repo.dirstate | ||||
changedfiles = None | ||||
# See command doc for what minimal does. | ||||
if opts.get(r'minimal'):
manifestfiles = set(ctx.manifest().keys())
dirstatefiles = set(dirstate) | ||||
manifestonly = manifestfiles - dirstatefiles | ||||
dsonly = dirstatefiles - manifestfiles | ||||
dsnotadded = set(f for f in dsonly if dirstate[f] != 'a') | ||||
changedfiles = manifestonly | dsnotadded | ||||
dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles) | ||||
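# Hypothetical usage sketch (revision number invented): reset the dirstate
# to a given revision so that the next `hg status` re-checks file content.
#
#   $ hg debugrebuilddirstate -r 42
#   $ hg status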
Pierre-Yves David
|
r30949 | @command('debugrebuildfncache', [], '') | ||
def debugrebuildfncache(ui, repo): | ||||
"""rebuild the fncache file""" | ||||
repair.rebuildfncache(ui, repo) | ||||
Pierre-Yves David
|
r30950 | @command('debugrename', | ||
[('r', 'rev', '', _('revision to debug'), _('REV'))], | ||||
Martin von Zweigbergk
|
r41839 | _('[-r REV] [FILE]...')) | ||
def debugrename(ui, repo, *pats, **opts): | ||||
Pierre-Yves David
|
r30950 | """dump rename information""" | ||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
Pierre-Yves David
|
r30950 | ctx = scmutil.revsingle(repo, opts.get('rev')) | ||
Martin von Zweigbergk
|
r41839 | m = scmutil.match(ctx, pats, opts) | ||
Pierre-Yves David
|
r30950 | for abs in ctx.walk(m): | ||
fctx = ctx[abs] | ||||
o = fctx.filelog().renamed(fctx.filenode()) | ||||
Martin von Zweigbergk
|
r41808 | rel = repo.pathto(abs) | ||
Pierre-Yves David
|
r30950 | if o: | ||
ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) | ||||
else: | ||||
ui.write(_("%s not renamed\n") % rel) | ||||
Yuya Nishihara
|
r32375 | @command('debugrevlog', cmdutil.debugrevlogopts + | ||
Pierre-Yves David
|
r30951 | [('d', 'dump', False, _('dump index data'))], | ||
_('-c|-m|FILE'), | ||||
optionalrepo=True) | ||||
def debugrevlog(ui, repo, file_=None, **opts): | ||||
"""show data and statistics about a revlog""" | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
Pierre-Yves David
|
r30951 | r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts) | ||
if opts.get("dump"): | ||||
numrevs = len(r) | ||||
ui.write(("# rev p1rev p2rev start end deltastart base p1 p2" | ||||
" rawsize totalsize compression heads chainlen\n")) | ||||
ts = 0 | ||||
heads = set() | ||||
Gregory Szorc
|
r38806 | for rev in pycompat.xrange(numrevs): | ||
Pierre-Yves David
|
r30951 | dbase = r.deltaparent(rev) | ||
if dbase == -1: | ||||
dbase = rev | ||||
cbase = r.chainbase(rev) | ||||
clen = r.chainlen(rev) | ||||
p1, p2 = r.parentrevs(rev) | ||||
rs = r.rawsize(rev) | ||||
ts = ts + rs | ||||
heads -= set(r.parentrevs(rev)) | ||||
heads.add(rev) | ||||
try: | ||||
compression = ts / r.end(rev) | ||||
except ZeroDivisionError: | ||||
compression = 0 | ||||
ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d " | ||||
"%11d %5d %8d\n" % | ||||
(rev, p1, p2, r.start(rev), r.end(rev), | ||||
r.start(dbase), r.start(cbase), | ||||
r.start(p1), r.start(p2), | ||||
rs, ts, compression, len(heads), clen)) | ||||
return 0 | ||||
v = r.version | ||||
format = v & 0xFFFF | ||||
flags = [] | ||||
gdelta = False | ||||
if v & revlog.FLAG_INLINE_DATA:
flags.append('inline')
if v & revlog.FLAG_GENERALDELTA:
gdelta = True
flags.append('generaldelta') | ||||
if not flags: | ||||
flags = ['(none)'] | ||||
### tracks merge vs single parent
nummerges = 0

### tracks the ways the "delta" is built
# nodelta
numempty = 0
numemptytext = 0
numemptydelta = 0
# full file content
numfull = 0
# intermediate snapshot against a prior snapshot
numsemi = 0
# snapshot count per depth
numsnapdepth = collections.defaultdict(lambda: 0)
# delta against previous revision
numprev = 0
# delta against first or second parent (not prev)
nump1 = 0
nump2 = 0
# delta against neither prev nor parents
numother = 0
# delta against prev that are also first or second parent
# (details of `numprev`)
nump1prev = 0
nump2prev = 0

# data about the delta chain of each rev
chainlengths = []
chainbases = []
chainspans = []

# data about each revision
datasize = [None, 0, 0]
fullsize = [None, 0, 0]
semisize = [None, 0, 0]
# snapshot count per depth
snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
deltasize = [None, 0, 0]
chunktypecounts = {} | ||||
chunktypesizes = {} | ||||
def addsize(size, l): | ||||
if l[0] is None or size < l[0]: | ||||
l[0] = size | ||||
if size > l[1]: | ||||
l[1] = size | ||||
l[2] += size | ||||
numrevs = len(r) | ||||
for rev in pycompat.xrange(numrevs):
p1, p2 = r.parentrevs(rev)
delta = r.deltaparent(rev) | ||||
if format > 0: | ||||
addsize(r.rawsize(rev), datasize) | ||||
if p2 != nullrev: | ||||
nummerges += 1 | ||||
size = r.length(rev) | ||||
if delta == nullrev: | ||||
chainlengths.append(0) | ||||
chainbases.append(r.start(rev))
chainspans.append(size)
if size == 0:
numempty += 1
numemptytext += 1
else:
numfull += 1
numsnapdepth[0] += 1
addsize(size, fullsize)
addsize(size, snapsizedepth[0])
else:
chainlengths.append(chainlengths[delta] + 1)
baseaddr = chainbases[delta]
revaddr = r.start(rev)
chainbases.append(baseaddr)
chainspans.append((revaddr - baseaddr) + size)
if size == 0:
numempty += 1
numemptydelta += 1
elif r.issnapshot(rev):
addsize(size, semisize)
numsemi += 1
depth = r.snapshotdepth(rev)
numsnapdepth[depth] += 1
addsize(size, snapsizedepth[depth])
else:
addsize(size, deltasize)
if delta == rev - 1:
numprev += 1
if delta == p1:
nump1prev += 1
elif delta == p2:
nump2prev += 1
elif delta == p1:
nump1 += 1
elif delta == p2:
nump2 += 1
elif delta != nullrev:
numother += 1
Pierre-Yves David
|
r30951 | |||
# Obtain data on the raw chunks in the revlog. | ||||
if util.safehasattr(r, '_getsegmentforrevs'):
segment = r._getsegmentforrevs(rev, rev)[1]
else:
segment = r._revlog._getsegmentforrevs(rev, rev)[1]
if segment:
chunktype = bytes(segment[0:1])
else:
chunktype = 'empty' | ||||
if chunktype not in chunktypecounts: | ||||
chunktypecounts[chunktype] = 0 | ||||
chunktypesizes[chunktype] = 0 | ||||
chunktypecounts[chunktype] += 1 | ||||
chunktypesizes[chunktype] += size | ||||
# Adjust size min value for empty cases | ||||
for size in (datasize, fullsize, semisize, deltasize):
if size[0] is None:
size[0] = 0
numdeltas = numrevs - numfull - numempty - numsemi
numoprev = numprev - nump1prev - nump2prev
totalrawsize = datasize[2] | ||||
datasize[2] /= numrevs | ||||
fulltotal = fullsize[2] | ||||
fullsize[2] /= numfull | ||||
semitotal = semisize[2]
snaptotal = {}
if numsemi > 0:
semisize[2] /= numsemi
for depth in snapsizedepth:
snaptotal[depth] = snapsizedepth[depth][2]
snapsizedepth[depth][2] /= numsnapdepth[depth]
deltatotal = deltasize[2]
if numdeltas > 0:
deltasize[2] /= numdeltas
totalsize = fulltotal + semitotal + deltatotal
avgchainlen = sum(chainlengths) / numrevs
maxchainlen = max(chainlengths) | ||||
r33057 | maxchainspan = max(chainspans) | |||
Pierre-Yves David
|
r30951 | compratio = 1 | ||
if totalsize: | ||||
compratio = totalrawsize / totalsize | ||||
basedfmtstr = '%%%dd\n' | ||||
basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n' | ||||
def dfmtstr(max): | ||||
return basedfmtstr % len(str(max)) | ||||
def pcfmtstr(max, padding=0): | ||||
return basepcfmtstr % (len(str(max)), ' ' * padding) | ||||
def pcfmt(value, total): | ||||
if total: | ||||
return (value, 100 * float(value) / total) | ||||
else: | ||||
return value, 100.0 | ||||
ui.write(('format : %d\n') % format) | ||||
ui.write(('flags : %s\n') % ', '.join(flags)) | ||||
ui.write('\n') | ||||
fmt = pcfmtstr(totalsize) | ||||
fmt2 = dfmtstr(totalsize) | ||||
ui.write(('revisions : ') + fmt2 % numrevs) | ||||
ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs)) | ||||
ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs)) | ||||
ui.write(('revisions : ') + fmt2 % numrevs) | ||||
Boris Feld
|
r39116 | ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs)) | ||
Boris Feld
|
r39117 | ui.write((' text : ') | ||
+ fmt % pcfmt(numemptytext, numemptytext + numemptydelta)) | ||||
ui.write((' delta : ') | ||||
+ fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)) | ||||
Boris Feld
|
r39189 | ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs)) | ||
for depth in sorted(numsnapdepth): | ||||
ui.write((' lvl-%-3d : ' % depth) | ||||
+ fmt % pcfmt(numsnapdepth[depth], numrevs)) | ||||
Pierre-Yves David
|
r30951 | ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs)) | ||
ui.write(('revision size : ') + fmt2 % totalsize) | ||||
Boris Feld
|
r39189 | ui.write((' snapshot : ') | ||
+ fmt % pcfmt(fulltotal + semitotal, totalsize)) | ||||
for depth in sorted(numsnapdepth): | ||||
ui.write((' lvl-%-3d : ' % depth) | ||||
+ fmt % pcfmt(snaptotal[depth], totalsize)) | ||||
Pierre-Yves David
|
r30951 | ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize)) | ||
def fmtchunktype(chunktype): | ||||
if chunktype == 'empty': | ||||
return ' %s : ' % chunktype | ||||
Pulkit Goyal
|
r33107 | elif chunktype in pycompat.bytestr(string.ascii_letters): | ||
Pierre-Yves David
|
r30951 | return ' 0x%s (%s) : ' % (hex(chunktype), chunktype) | ||
else: | ||||
return ' 0x%s : ' % hex(chunktype) | ||||
ui.write('\n') | ||||
ui.write(('chunks : ') + fmt2 % numrevs) | ||||
for chunktype in sorted(chunktypecounts): | ||||
ui.write(fmtchunktype(chunktype)) | ||||
ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs)) | ||||
ui.write(('chunks size : ') + fmt2 % totalsize) | ||||
for chunktype in sorted(chunktypecounts): | ||||
ui.write(fmtchunktype(chunktype)) | ||||
ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize)) | ||||
ui.write('\n') | ||||
Yuya Nishihara
|
r33062 | fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio)) | ||
Pierre-Yves David
|
r30951 | ui.write(('avg chain length : ') + fmt % avgchainlen) | ||
ui.write(('max chain length : ') + fmt % maxchainlen) | ||||
Yuya Nishihara
|
r33062 | ui.write(('max chain reach : ') + fmt % maxchainspan) | ||
Pierre-Yves David
|
r30951 | ui.write(('compression ratio : ') + fmt % compratio) | ||
if format > 0: | ||||
ui.write('\n') | ||||
ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n') | ||||
% tuple(datasize)) | ||||
ui.write(('full revision size (min/max/avg) : %d / %d / %d\n') | ||||
% tuple(fullsize)) | ||||
Boris Feld
|
r39189 | ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n') | ||
% tuple(semisize)) | ||||
for depth in sorted(snapsizedepth): | ||||
if depth == 0: | ||||
continue | ||||
ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n') | ||||
% ((depth,) + tuple(snapsizedepth[depth]))) | ||||
Pierre-Yves David
|
r30951 | ui.write(('delta size (min/max/avg) : %d / %d / %d\n') | ||
% tuple(deltasize)) | ||||
if numdeltas > 0: | ||||
ui.write('\n') | ||||
fmt = pcfmtstr(numdeltas) | ||||
fmt2 = pcfmtstr(numdeltas, 4) | ||||
ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas)) | ||||
if numprev > 0: | ||||
ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev, | ||||
numprev)) | ||||
ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev, | ||||
numprev)) | ||||
ui.write((' other : ') + fmt2 % pcfmt(numoprev, | ||||
numprev)) | ||||
if gdelta: | ||||
ui.write(('deltas against p1 : ') | ||||
+ fmt % pcfmt(nump1, numdeltas)) | ||||
ui.write(('deltas against p2 : ') | ||||
+ fmt % pcfmt(nump2, numdeltas)) | ||||
ui.write(('deltas against other : ') + fmt % pcfmt(numother, | ||||
numdeltas)) | ||||
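# Hypothetical excerpt of `hg debugrevlog -m` output (all numbers invented),
# illustrating how the counters gathered above are reported:
#
#   format : 1
#   flags  : inline, generaldelta
#
#   revisions     :      120
#       merges    :        6 ( 5.00%)
#       normal    :      114 (95.00%)
#   ...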
Gregory Szorc
|
r39318 | @command('debugrevlogindex', cmdutil.debugrevlogopts + | ||
[('f', 'format', 0, _('revlog format'), _('FORMAT'))], | ||||
_('[-f FORMAT] -c|-m|FILE'), | ||||
optionalrepo=True) | ||||
def debugrevlogindex(ui, repo, file_=None, **opts): | ||||
"""dump the contents of a revlog index""" | ||||
opts = pycompat.byteskwargs(opts) | ||||
r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts) | ||||
format = opts.get('format', 0) | ||||
if format not in (0, 1): | ||||
raise error.Abort(_("unknown format %d") % format) | ||||
if ui.debugflag: | ||||
shortfn = hex | ||||
else: | ||||
shortfn = short | ||||
# There might not be anything in r, so have a sane default | ||||
idlen = 12 | ||||
for i in r: | ||||
idlen = len(shortfn(r.node(i))) | ||||
break | ||||
if format == 0: | ||||
if ui.verbose: | ||||
ui.write((" rev offset length linkrev" | ||||
" %s %s p2\n") % ("nodeid".ljust(idlen), | ||||
"p1".ljust(idlen))) | ||||
else: | ||||
ui.write((" rev linkrev %s %s p2\n") % ( | ||||
"nodeid".ljust(idlen), "p1".ljust(idlen))) | ||||
elif format == 1: | ||||
if ui.verbose: | ||||
ui.write((" rev flag offset length size link p1" | ||||
" p2 %s\n") % "nodeid".rjust(idlen)) | ||||
else: | ||||
ui.write((" rev flag size link p1 p2 %s\n") % | ||||
"nodeid".rjust(idlen)) | ||||
for i in r: | ||||
node = r.node(i) | ||||
if format == 0: | ||||
try: | ||||
pp = r.parents(node) | ||||
except Exception: | ||||
pp = [nullid, nullid] | ||||
if ui.verbose: | ||||
ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % ( | ||||
i, r.start(i), r.length(i), r.linkrev(i), | ||||
shortfn(node), shortfn(pp[0]), shortfn(pp[1]))) | ||||
else: | ||||
ui.write("% 6d % 7d %s %s %s\n" % ( | ||||
i, r.linkrev(i), shortfn(node), shortfn(pp[0]), | ||||
shortfn(pp[1]))) | ||||
elif format == 1: | ||||
pr = r.parentrevs(i) | ||||
if ui.verbose: | ||||
ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % ( | ||||
i, r.flags(i), r.start(i), r.length(i), r.rawsize(i), | ||||
r.linkrev(i), pr[0], pr[1], shortfn(node))) | ||||
else: | ||||
ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % ( | ||||
i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1], | ||||
shortfn(node))) | ||||
Pierre-Yves David
|
r30952 | @command('debugrevspec', | ||
[('', 'optimize', None, | ||||
_('print parsed tree after optimizing (DEPRECATED)')), | ||||
('', 'show-revs', True, _('print list of result revisions (default)')),
('s', 'show-set', None, _('print internal representation of result set')),
('p', 'show-stage', [],
_('print parsed tree at the given stage'), _('NAME')), | ||||
('', 'no-optimized', False, _('evaluate tree without optimization')), | ||||
('', 'verify-optimized', False, _('verify optimized result')), | ||||
], | ||||
('REVSPEC')) | ||||
def debugrevspec(ui, repo, expr, **opts): | ||||
"""parse and apply a revision specification | ||||
Use -p/--show-stage option to print the parsed tree at the given stages. | ||||
Use -p all to print tree at every stage. | ||||
Yuya Nishihara
|
r32796 | Use --no-show-revs option with -s or -p to print only the set | ||
representation or the parsed tree respectively. | ||||
Pierre-Yves David
|
r30952 | Use --verify-optimized to compare the optimized result with the unoptimized | ||
one. Returns 1 if the optimized result differs. | ||||
""" | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
aliases = ui.configitems('revsetalias')
stages = [
('parsed', lambda tree: tree),
('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
ui.warn)),
('concatenated', revsetlang.foldconcat),
('analyzed', revsetlang.analyze),
('optimized', revsetlang.optimize),
]
if opts['no_optimized']: | ||||
stages = stages[:-1] | ||||
if opts['verify_optimized'] and opts['no_optimized']: | ||||
raise error.Abort(_('cannot use --verify-optimized with ' | ||||
'--no-optimized')) | ||||
stagenames = set(n for n, f in stages) | ||||
showalways = set() | ||||
showchanged = set() | ||||
if ui.verbose and not opts['show_stage']: | ||||
# show parsed tree by --verbose (deprecated) | ||||
showalways.add('parsed') | ||||
showchanged.update(['expanded', 'concatenated']) | ||||
if opts['optimize']: | ||||
showalways.add('optimized') | ||||
if opts['show_stage'] and opts['optimize']: | ||||
raise error.Abort(_('cannot use --optimize with --show-stage')) | ||||
if opts['show_stage'] == ['all']: | ||||
showalways.update(stagenames) | ||||
else: | ||||
for n in opts['show_stage']: | ||||
if n not in stagenames: | ||||
raise error.Abort(_('invalid stage name: %s') % n) | ||||
showalways.update(opts['show_stage']) | ||||
treebystage = {} | ||||
printedtree = None | ||||
Martin von Zweigbergk
|
r37368 | tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo)) | ||
Pierre-Yves David
|
r30952 | for n, f in stages: | ||
treebystage[n] = tree = f(tree) | ||||
if n in showalways or (n in showchanged and tree != printedtree): | ||||
if opts['show_stage'] or n != 'parsed': | ||||
ui.write(("* %s:\n") % n) | ||||
Yuya Nishihara
|
r31024 | ui.write(revsetlang.prettyformat(tree), "\n") | ||
Pierre-Yves David
|
r30952 | printedtree = tree | ||
if opts['verify_optimized']: | ||||
arevs = revset.makematcher(treebystage['analyzed'])(repo) | ||||
brevs = revset.makematcher(treebystage['optimized'])(repo) | ||||
Yuya Nishihara
|
r32795 | if opts['show_set'] or (opts['show_set'] is None and ui.verbose): | ||
Yuya Nishihara
|
r38280 | ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n") | ||
ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n") | ||||
Pierre-Yves David
|
r30952 | arevs = list(arevs) | ||
brevs = list(brevs) | ||||
if arevs == brevs: | ||||
return 0 | ||||
ui.write(('--- analyzed\n'), label='diff.file_a') | ||||
ui.write(('+++ optimized\n'), label='diff.file_b') | ||||
sm = difflib.SequenceMatcher(None, arevs, brevs) | ||||
for tag, alo, ahi, blo, bhi in sm.get_opcodes(): | ||||
if tag in (r'delete', r'replace'):
for c in arevs[alo:ahi]:
ui.write('-%d\n' % c, label='diff.deleted')
if tag in (r'insert', r'replace'):
for c in brevs[blo:bhi]:
ui.write('+%d\n' % c, label='diff.inserted')
if tag == r'equal':
for c in arevs[alo:ahi]:
ui.write(' %d\n' % c)
Pierre-Yves David
|
r30952 | return 1 | ||
func = revset.makematcher(tree) | ||||
revs = func(repo) | ||||
if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
if not opts['show_revs']:
return
for c in revs:
ui.write("%d\n" % c)
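# Hypothetical example (expression and revision numbers invented): print the
# tree after each parsing stage, then the matching revisions.
#
#   $ hg debugrevspec -p all 'heads(all())'
#   * parsed:
#   ...                      (one tree representation per stage)
#   7
#   12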
Pierre-Yves David
|
r30952 | |||
Gregory Szorc
|
r36544 | @command('debugserve', [ | ||
('', 'sshstdio', False, _('run an SSH server bound to process handles')), | ||||
('', 'logiofd', '', _('file descriptor to log server I/O to')), | ||||
('', 'logiofile', '', _('file to log server I/O to')), | ||||
], '') | ||||
def debugserve(ui, repo, **opts): | ||||
"""run a server with advanced settings | ||||
This command is similar to :hg:`serve`. It exists partially as a | ||||
workaround to the fact that ``hg serve --stdio`` must have specific | ||||
arguments for security reasons. | ||||
""" | ||||
opts = pycompat.byteskwargs(opts) | ||||
if not opts['sshstdio']: | ||||
raise error.Abort(_('only --sshstdio is currently supported')) | ||||
logfh = None | ||||
if opts['logiofd'] and opts['logiofile']: | ||||
raise error.Abort(_('cannot use both --logiofd and --logiofile')) | ||||
if opts['logiofd']: | ||||
# Line buffered because output is line based. | ||||
Augie Fackler
|
r38333 | try: | ||
logfh = os.fdopen(int(opts['logiofd']), r'ab', 1) | ||||
except OSError as e: | ||||
if e.errno != errno.ESPIPE: | ||||
raise | ||||
# can't seek a pipe, so `ab` mode fails on py3 | ||||
logfh = os.fdopen(int(opts['logiofd']), r'wb', 1) | ||||
Gregory Szorc
|
r36544 | elif opts['logiofile']: | ||
logfh = open(opts['logiofile'], 'ab', 1) | ||||
s = wireprotoserver.sshserver(ui, repo, logfh=logfh) | ||||
s.serve_forever() | ||||
Pierre-Yves David
|
r30953 | @command('debugsetparents', [], _('REV1 [REV2]')) | ||
def debugsetparents(ui, repo, rev1, rev2=None): | ||||
"""manually set the parents of the current working directory | ||||
This is useful for writing repository conversion tools, but should | ||||
be used with care. For example, neither the working directory nor the | ||||
dirstate is updated, so file status may be incorrect after running this | ||||
command. | ||||
Returns 0 on success. | ||||
""" | ||||
Martin von Zweigbergk
|
r37161 | node1 = scmutil.revsingle(repo, rev1).node() | ||
node2 = scmutil.revsingle(repo, rev2, 'null').node() | ||||
Pierre-Yves David
|
r30953 | |||
with repo.wlock(): | ||||
repo.setparents(node1, node2)
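# Hypothetical usage sketch (revision numbers invented): make the working
# directory look like a merge of two existing revisions without touching
# any files or the dirstate.
#
#   $ hg debugsetparents 10 12
#   $ hg parents        # now reports revisions 10 and 12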
Pierre-Yves David
|
r30953 | |||
Matt Harbison
|
r33493 | @command('debugssl', [], '[SOURCE]', optionalrepo=True) | ||
def debugssl(ui, repo, source=None, **opts): | ||||
'''test a secure connection to a server | ||||
This builds the certificate chain for the server on Windows, installing the | ||||
missing intermediates and trusted root via Windows Update if necessary. It | ||||
does nothing on other platforms. | ||||
If SOURCE is omitted, the 'default' path will be used. If a URL is given, | ||||
that server is used. See :hg:`help urls` for more information. | ||||
If the update succeeds, retry the original operation. Otherwise, the cause | ||||
of the SSL error is likely another issue. | ||||
''' | ||||
if not pycompat.iswindows:
raise error.Abort(_('certificate chain building is only possible on '
'Windows'))
if not source: | ||||
if not repo:
raise error.Abort(_("there is no Mercurial repository here, and no "
"server specified"))
source = "default"
source, branches = hg.parseurl(ui.expandpath(source)) | ||||
url = util.url(source) | ||||
Yuya Nishihara
|
r35444 | defaultport = {'https': 443, 'ssh': 22} | ||
if url.scheme in defaultport: | ||||
try: | ||||
addr = (url.host, int(url.port or defaultport[url.scheme])) | ||||
except ValueError: | ||||
raise error.Abort(_("malformed port number in URL")) | ||||
Matt Harbison
|
r33493 | else: | ||
Kevin Bullock
|
r33553 | raise error.Abort(_("only https and ssh connections are supported")) | ||
Matt Harbison
|
r33493 | |||
from . import win32 | ||||
s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS, | ||||
cert_reqs=ssl.CERT_NONE, ca_certs=None) | ||||
try: | ||||
s.connect(addr) | ||||
cert = s.getpeercert(True) | ||||
Kevin Bullock
|
r33553 | ui.status(_('checking the certificate chain for %s\n') % url.host) | ||
Matt Harbison
|
r33493 | |||
complete = win32.checkcertificatechain(cert, build=False) | ||||
if not complete: | ||||
Kevin Bullock
|
r33553 | ui.status(_('certificate chain is incomplete, updating... ')) | ||
Matt Harbison
|
r33493 | |||
if not win32.checkcertificatechain(cert): | ||||
Kevin Bullock
|
r33553 | ui.status(_('failed.\n')) | ||
Matt Harbison
|
r33493 | else: | ||
Kevin Bullock
|
r33553 | ui.status(_('done.\n')) | ||
Matt Harbison
|
r33493 | else: | ||
Kevin Bullock
|
r33553 | ui.status(_('full certificate chain is available\n')) | ||
Matt Harbison
|
r33493 | finally: | ||
s.close() | ||||
Pierre-Yves David
|
r30955 | @command('debugsub', | ||
[('r', 'rev', '', | ||||
_('revision to check'), _('REV'))], | ||||
_('[-r REV] [REV]')) | ||||
def debugsub(ui, repo, rev=None): | ||||
ctx = scmutil.revsingle(repo, rev, None) | ||||
for k, v in sorted(ctx.substate.items()): | ||||
ui.write(('path %s\n') % k) | ||||
ui.write((' source %s\n') % v[0]) | ||||
ui.write((' revision %s\n') % v[1]) | ||||
Pierre-Yves David
|
r30956 | @command('debugsuccessorssets', | ||
Boris Feld
|
r33274 | [('', 'closest', False, _('return closest successors sets only'))], | ||
Pierre-Yves David
|
r30956 | _('[REV]')) | ||
Boris Feld
|
r33274 | def debugsuccessorssets(ui, repo, *revs, **opts): | ||
Pierre-Yves David
|
r30956 | """show set of successors for revision | ||
A successors set of changeset A is a consistent group of revisions that | ||||
Boris Feld
|
r33274 | succeed A. It contains non-obsolete changesets only unless closests | ||
successors set is set. | ||||
Pierre-Yves David
|
r30956 | |||
In most cases a changeset A has a single successors set containing a single | ||||
successor (changeset A replaced by A'). | ||||
A changeset that is made obsolete with no successors is called "pruned".
Such changesets have no successors sets at all. | ||||
A changeset that has been "split" will have a successors set containing | ||||
more than one successor. | ||||
A changeset that has been rewritten in multiple different ways is called | ||||
"divergent". Such changesets have multiple successor sets (each of which | ||||
may also be split, i.e. have multiple successors). | ||||
Results are displayed as follows:: | ||||
<rev1> | ||||
<successors-1A> | ||||
<rev2> | ||||
<successors-2A> | ||||
<successors-2B1> <successors-2B2> <successors-2B3> | ||||
Here rev2 has two possible (i.e. divergent) successors sets. The first | ||||
holds one element, whereas the second holds three (i.e. the changeset has | ||||
been split). | ||||
""" | ||||
# passed to successorssets caching computation from one call to another | ||||
cache = {} | ||||
ctx2str = bytes
node2str = short
for rev in scmutil.revrange(repo, revs): | ||||
ctx = repo[rev] | ||||
ui.write('%s\n'% ctx2str(ctx)) | ||||
for succsset in obsutil.successorssets(repo, ctx.node(),
closest=opts[r'closest'],
cache=cache):
if succsset:
ui.write(' ') | ||||
ui.write(node2str(succsset[0])) | ||||
for node in succsset[1:]: | ||||
ui.write(' ') | ||||
ui.write(node2str(node)) | ||||
ui.write('\n') | ||||
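# Illustrative sketch (not part of Mercurial): collect the successors sets
# of a single changeset as short hashes, the way the loop above does before
# writing them out. The helper name is hypothetical.
def _examplesuccessors(repo, ctx):
    cache = {}
    return [[short(n) for n in succsset]
            for succsset in obsutil.successorssets(repo, ctx.node(),
                                                   cache=cache)]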
Pierre-Yves David
|
r30957 | @command('debugtemplate', | ||
[('r', 'rev', [], _('apply template on changesets'), _('REV')), | ||||
('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))], | ||||
_('[-r REV]... [-D KEY=VALUE]... TEMPLATE'), | ||||
optionalrepo=True) | ||||
def debugtemplate(ui, repo, tmpl, **opts): | ||||
"""parse and apply a template | ||||
If -r/--rev is given, the template is processed as a log template and | ||||
applied to the given changesets. Otherwise, it is processed as a generic | ||||
template. | ||||
Use --verbose to print the parsed tree. | ||||
""" | ||||
revs = None | ||||
Pulkit Goyal
|
r33101 | if opts[r'rev']: | ||
Pierre-Yves David
|
r30957 | if repo is None: | ||
raise error.RepoError(_('there is no Mercurial repository here ' | ||||
'(.hg not found)')) | ||||
Pulkit Goyal
|
r33101 | revs = scmutil.revrange(repo, opts[r'rev']) | ||
Pierre-Yves David
|
r30957 | |||
props = {} | ||||
Pulkit Goyal
|
r33101 | for d in opts[r'define']: | ||
Pierre-Yves David
|
r30957 | try: | ||
k, v = (e.strip() for e in d.split('=', 1)) | ||||
Yuya Nishihara
|
r31517 | if not k or k == 'ui': | ||
Pierre-Yves David
|
r30957 | raise ValueError | ||
props[k] = v | ||||
except ValueError: | ||||
raise error.Abort(_('malformed keyword definition: %s') % d) | ||||
if ui.verbose: | ||||
aliases = ui.configitems('templatealias') | ||||
tree = templater.parse(tmpl) | ||||
ui.note(templater.prettyformat(tree), '\n') | ||||
newtree = templater.expandaliases(tree, aliases) | ||||
if newtree != tree: | ||||
ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n') | ||||
if revs is None: | ||||
Yuya Nishihara
|
r35485 | tres = formatter.templateresources(ui, repo) | ||
t = formatter.maketemplater(ui, tmpl, resources=tres) | ||||
Yuya Nishihara
|
r38374 | if ui.verbose: | ||
kwds, funcs = t.symbolsuseddefault() | ||||
ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds))) | ||||
ui.write(("* functions: %s\n") % ', '.join(sorted(funcs))) | ||||
Yuya Nishihara
|
r37003 | ui.write(t.renderdefault(props)) | ||
Pierre-Yves David
|
r30957 | else: | ||
Yuya Nishihara
|
r35906 | displayer = logcmdutil.maketemplater(ui, repo, tmpl) | ||
Yuya Nishihara
|
r38374 | if ui.verbose: | ||
kwds, funcs = displayer.t.symbolsuseddefault() | ||||
ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds))) | ||||
ui.write(("* functions: %s\n") % ', '.join(sorted(funcs))) | ||||
Pierre-Yves David
|
r30957 | for r in revs: | ||
Pulkit Goyal
|
r33102 | displayer.show(repo[r], **pycompat.strkwargs(props)) | ||
Pierre-Yves David
|
r30957 | displayer.close() | ||
Yuya Nishihara
|
r36810 | @command('debuguigetpass', [ | ||
('p', 'prompt', '', _('prompt text'), _('TEXT')), | ||||
], _('[-p TEXT]'), norepo=True) | ||||
def debuguigetpass(ui, prompt=''): | ||||
"""show prompt to type password""" | ||||
r = ui.getpass(prompt) | ||||
ui.write(('response: %s\n') % r) | ||||
@command('debuguiprompt', [ | ||||
('p', 'prompt', '', _('prompt text'), _('TEXT')), | ||||
], _('[-p TEXT]'), norepo=True) | ||||
def debuguiprompt(ui, prompt=''): | ||||
"""show plain prompt""" | ||||
r = ui.prompt(prompt) | ||||
ui.write(('response: %s\n') % r) | ||||
Pierre-Yves David
|
r32265 | @command('debugupdatecaches', []) | ||
def debugupdatecaches(ui, repo, *pats, **opts): | ||||
"""warm all known caches in the repository""" | ||||
Jun Wu
|
r33438 | with repo.wlock(), repo.lock(): | ||
Boris Feld
|
r36970 | repo.updatecaches(full=True) | ||
Pierre-Yves David
|
r32265 | |||
Gregory Szorc
|
r30774 | @command('debugupgraderepo', [ | ||
('o', 'optimize', [], _('extra optimization to perform'), _('NAME')), | ||||
('', 'run', False, _('performs an upgrade')), | ||||
Boris Feld
|
r41121 | ('', 'backup', True, _('keep the old repository content around')), | ||
Gregory Szorc
|
r30774 | ]) | ||
Boris Feld
|
r41121 | def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True): | ||
Gregory Szorc
|
r30774 | """upgrade a repository to use different features | ||
If no arguments are specified, the repository is evaluated for upgrade | ||||
and a list of problems and potential optimizations is printed. | ||||
With ``--run``, a repository upgrade is performed. Behavior of the upgrade | ||||
can be influenced via additional arguments. More details will be provided | ||||
by the command output when run without ``--run``. | ||||
During the upgrade, the repository will be locked and no writes will be | ||||
allowed. | ||||
At the end of the upgrade, the repository may not be readable while new | ||||
repository data is swapped in. This window will be as long as it takes to | ||||
rename some directories inside the ``.hg`` directory. On most machines, this | ||||
should complete almost instantaneously and the chances of a consumer being | ||||
unable to access the repository should be low. | ||||
""" | ||||
Boris Feld
|
r41121 | return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize, | ||
backup=backup) | ||||
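# Illustrative usage (a sketch):
#
#   $ hg debugupgraderepo                    # report deficiencies and
#                                            # possible optimizations only
#   $ hg debugupgraderepo --run              # actually perform the upgrade
#   $ hg debugupgraderepo --run --no-backup  # do not keep the old content
#
# --no-backup is the negated form of the boolean --backup flag declared
# above.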
Pierre-Yves David
|
r30958 | |||
Yuya Nishihara
|
r32375 | @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'), | ||
Pierre-Yves David
|
r30958 | inferrepo=True) | ||
def debugwalk(ui, repo, *pats, **opts): | ||||
"""show how files match on given patterns""" | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
Pierre-Yves David
|
r30958 | m = scmutil.match(repo[None], pats, opts) | ||
Yuya Nishihara
|
r38281 | if ui.verbose: | ||
Yuya Nishihara
|
r38282 | ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n') | ||
Augie Fackler
|
r32363 | items = list(repo[None].walk(m)) | ||
Pierre-Yves David
|
r30958 | if not items: | ||
return | ||||
f = lambda fn: fn | ||||
if ui.configbool('ui', 'slash') and pycompat.ossep != '/': | ||||
f = lambda fn: util.normpath(fn) | ||||
fmt = 'f %%-%ds %%-%ds %%s' % ( | ||||
max([len(abs) for abs in items]), | ||||
Martin von Zweigbergk
|
r41808 | max([len(repo.pathto(abs)) for abs in items])) | ||
Pierre-Yves David
|
r30958 | for abs in items: | ||
Martin von Zweigbergk
|
r41808 | line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '') | ||
Pierre-Yves David
|
r30958 | ui.write("%s\n" % line.rstrip()) | ||
Pierre-Yves David
|
r30959 | |||
r36972 | @command('debugwhyunstable', [], _('REV')) | |||
def debugwhyunstable(ui, repo, rev): | ||||
"""explain instabilities of a changeset""" | ||||
Martin von Zweigbergk
|
r37414 | for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)): | ||
r36972 | dnodes = '' | |||
if entry.get('divergentnodes'): | ||||
dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr()) | ||||
for ctx in entry['divergentnodes']) + ' ' | ||||
ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes, | ||||
entry['reason'], entry['node'])) | ||||
Pierre-Yves David
|
r30959 | @command('debugwireargs', | ||
[('', 'three', '', 'three'), | ||||
('', 'four', '', 'four'), | ||||
('', 'five', '', 'five'), | ||||
Yuya Nishihara
|
r32375 | ] + cmdutil.remoteopts, | ||
Pierre-Yves David
|
r30959 | _('REPO [OPTIONS]... [ONE [TWO]]'), | ||
norepo=True) | ||||
def debugwireargs(ui, repopath, *vals, **opts): | ||||
Pulkit Goyal
|
r33100 | opts = pycompat.byteskwargs(opts) | ||
Pierre-Yves David
|
r30959 | repo = hg.peer(ui, opts, repopath) | ||
Yuya Nishihara
|
r32375 | for opt in cmdutil.remoteopts: | ||
Pierre-Yves David
|
r30959 | del opts[opt[1]] | ||
args = {} | ||||
for k, v in opts.iteritems(): | ||||
if v: | ||||
args[k] = v | ||||
Pulkit Goyal
|
r35402 | args = pycompat.strkwargs(args) | ||
Pierre-Yves David
|
r30959 | # run twice to check that we don't mess up the stream for the next command | ||
res1 = repo.debugwireargs(*vals, **args) | ||||
res2 = repo.debugwireargs(*vals, **args) | ||||
ui.write("%s\n" % res1) | ||||
if res1 != res2: | ||||
ui.warn("%s\n" % res2) | ||||
Gregory Szorc
|
r36545 | |||
def _parsewirelangblocks(fh): | ||||
activeaction = None | ||||
blocklines = [] | ||||
Gregory Szorc
|
r40210 | lastindent = 0 | ||
Gregory Szorc
|
r36545 | |||
for line in fh: | ||||
line = line.rstrip() | ||||
if not line: | ||||
continue | ||||
if line.startswith(b'#'): | ||||
continue | ||||
Augie Fackler
|
r39097 | if not line.startswith(b' '): | ||
Gregory Szorc
|
r36545 | # New block. Flush previous one. | ||
if activeaction: | ||||
yield activeaction, blocklines | ||||
activeaction = line | ||||
blocklines = [] | ||||
Gregory Szorc
|
r40210 | lastindent = 0 | ||
Gregory Szorc
|
r36545 | continue | ||
# Else we start with an indent. | ||||
if not activeaction: | ||||
raise error.Abort(_('indented line outside of block')) | ||||
Gregory Szorc
|
r40210 | indent = len(line) - len(line.lstrip()) | ||
# If this line is indented more than the last line, concatenate it. | ||||
if indent > lastindent and blocklines: | ||||
blocklines[-1] += line.lstrip() | ||||
else: | ||||
blocklines.append(line) | ||||
lastindent = indent | ||||
Gregory Szorc
|
r36545 | |||
# Flush last block. | ||||
if activeaction: | ||||
yield activeaction, blocklines | ||||
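# Illustrative parsing sketch (hypothetical input) showing how an unindented
# line opens a block and indented lines become its payload:
#
#   >>> blocks = list(_parsewirelangblocks(iter([
#   ...     b'# a comment, ignored\n',
#   ...     b'command listkeys\n',
#   ...     b'    namespace bookmarks\n',
#   ... ])))
#   >>> blocks == [(b'command listkeys', [b'    namespace bookmarks'])]
#   True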
@command('debugwireproto', | ||||
[ | ||||
('', 'localssh', False, _('start an SSH server for this repo')), | ||||
('', 'peer', '', _('construct a specific version of the peer')), | ||||
Gregory Szorc
|
r36551 | ('', 'noreadstderr', False, _('do not read from stderr of the remote')), | ||
Gregory Szorc
|
r37736 | ('', 'nologhandshake', False, | ||
_('do not log I/O related to the peer handshake')), | ||||
Gregory Szorc
|
r36545 | ] + cmdutil.remoteopts, | ||
Gregory Szorc
|
r37030 | _('[PATH]'), | ||
Gregory Szorc
|
r36545 | optionalrepo=True) | ||
Gregory Szorc
|
r37030 | def debugwireproto(ui, repo, path=None, **opts): | ||
Gregory Szorc
|
r36545 | """send wire protocol commands to a server | ||
This command can be used to issue wire protocol commands to remote | ||||
peers and to debug the raw data being exchanged. | ||||
``--localssh`` will start an SSH server against the current repository | ||||
and connect to that. By default, the connection will perform a handshake | ||||
and establish an appropriate peer instance. | ||||
``--peer`` can be used to bypass the handshake protocol and construct a | ||||
peer instance using the specified class type. Valid values are ``raw``, | ||||
Gregory Szorc
|
r37501 | ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending | ||
raw data payloads and don't support higher-level command actions. | ||||
Gregory Szorc
|
r36545 | |||
Gregory Szorc
|
r36551 | ``--noreadstderr`` can be used to disable automatic reading from stderr | ||
of the peer (for SSH connections only). Disabling automatic reading of | ||||
stderr is useful for making output more deterministic. | ||||
Gregory Szorc
|
r36545 | Commands are issued via a mini language which is specified via stdin. | ||
The language consists of individual actions to perform. An action is | ||||
defined by a block. A block is defined as a line with no leading | ||||
space followed by 0 or more lines with leading space. Blocks are | ||||
effectively a high-level command with additional metadata. | ||||
Lines beginning with ``#`` are ignored. | ||||
The following sections denote available actions. | ||||
raw | ||||
--- | ||||
Send raw data to the server. | ||||
The block payload contains the raw data to send as one atomic send | ||||
operation. The data may not actually be delivered in a single system | ||||
call: it depends on the abilities of the transport being used. | ||||
Each line in the block is de-indented and concatenated. Then, that | ||||
value is evaluated as a Python b'' literal. This allows the use of | ||||
backslash escaping, etc. | ||||
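For illustration, a hypothetical ``raw`` block might look like::
  raw
      hello\n
which de-indents and unescapes the payload and writes ``hello`` followed by
a newline to the server in a single operation.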
raw+ | ||||
---- | ||||
Behaves like ``raw`` except flushes output afterwards. | ||||
Gregory Szorc
|
r36547 | command <X> | ||
----------- | ||||
Send a request to run a named command, whose name follows the ``command`` | ||||
string. | ||||
Arguments to the command are defined as lines in this block. The format of | ||||
each line is ``<key> <value>``. e.g.:: | ||||
command listkeys | ||||
namespace bookmarks | ||||
Gregory Szorc
|
r37501 | If the value begins with ``eval:``, it will be interpreted as a Python | ||
literal expression. Otherwise values are interpreted as Python b'' literals. | ||||
This allows sending complex types and encoding special byte sequences via | ||||
backslash escaping. | ||||
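As an illustrative (hypothetical) example of ``eval:`` usage::
  command known
      nodes eval:[]
passes an empty Python list as the ``nodes`` argument.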
Gregory Szorc
|
r36547 | |||
Gregory Szorc
|
r36551 | The following arguments have special meaning: | ||
``PUSHFILE`` | ||||
When defined, the *push* mechanism of the peer will be used instead | ||||
of the static request-response mechanism and the content of the | ||||
file specified in the value of this argument will be sent as the | ||||
command payload. | ||||
This can be used to submit a local bundle file to the remote. | ||||
Gregory Szorc
|
r36548 | batchbegin | ||
---------- | ||||
Instruct the peer to begin a batched send. | ||||
All ``command`` blocks are queued for execution until the next | ||||
``batchsubmit`` block. | ||||
batchsubmit | ||||
----------- | ||||
Submit previously queued ``command`` blocks as a batch request. | ||||
This action MUST be paired with a ``batchbegin`` action. | ||||
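One possible batch, shown for illustration only, is::
  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit
The two queued commands are submitted as a single batch request.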
Gregory Szorc
|
r37031 | httprequest <method> <path> | ||
--------------------------- | ||||
(HTTP peer only) | ||||
Send an HTTP request to the peer. | ||||
The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``. | ||||
Arguments of the form ``<key>: <value>`` are interpreted as HTTP request | ||||
headers to add to the request. e.g. ``Accept: foo``. | ||||
The following arguments are special: | ||||
``BODYFILE`` | ||||
The content of the file defined as the value to this argument will be | ||||
transferred verbatim as the HTTP request body. | ||||
Gregory Szorc
|
r37069 | ``frame <type> <flags> <payload>`` | ||
Send a unified protocol frame as part of the request body. | ||||
All frames will be collected and sent as the body to the HTTP | ||||
request. | ||||
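A hypothetical request, for illustration::
  httprequest GET api/
      user-agent: test
issues a GET to ``api/`` relative to the peer path, with a ``user-agent``
header.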
Gregory Szorc
|
r36545 | close | ||
----- | ||||
Close the connection to the server. | ||||
flush | ||||
----- | ||||
Flush data written to the server. | ||||
readavailable | ||||
------------- | ||||
Yuya Nishihara
|
r36861 | Close the write end of the connection and read all available data from | ||
the server. | ||||
Gregory Szorc
|
r36545 | |||
If the connection to the server encompasses multiple pipes, we poll both | ||||
pipes and read available data. | ||||
readline | ||||
-------- | ||||
Read a line of output from the server. If there are multiple output | ||||
pipes, reads only the main pipe. | ||||
Gregory Szorc
|
r37025 | |||
ereadline | ||||
--------- | ||||
Like ``readline``, but read from the stderr pipe, if available. | ||||
read <X> | ||||
-------- | ||||
``read()`` N bytes from the server's main output pipe. | ||||
eread <X> | ||||
--------- | ||||
``read()`` N bytes from the server's stderr pipe, if available. | ||||
Gregory Szorc
|
r37069 | |||
Specifying Unified Frame-Based Protocol Frames | ||||
---------------------------------------------- | ||||
It is possible to emit *Unified Frame-Based Protocol* frames by using | ||||
special syntax. | ||||
A frame is composed of a type, flags, and a payload. These can be parsed | ||||
Gregory Szorc
|
r37304 | from a string of the form: | ||
<request-id> <stream-id> <stream-flags> <type> <flags> <payload> | ||||
``request-id`` and ``stream-id`` are integers defining the request and | ||||
stream identifiers. | ||||
Gregory Szorc
|
r37075 | |||
Gregory Szorc
|
r37069 | ``type`` can be an integer value for the frame type or the string name | ||
of the type. The strings are defined in ``wireprotoframing.py``. e.g. | ||||
``command-name``. | ||||
Gregory Szorc
|
r37304 | ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag | ||
components. Each component (and there can be just one) can be an integer | ||||
or a flag name for stream flags or frame flags, respectively. Values are | ||||
resolved to integers and then bitwise OR'd together. | ||||
Gregory Szorc
|
r37306 | ``payload`` represents the raw frame payload. If it begins with | ||
``cbor:``, the following string is evaluated as Python code and the | ||||
resulting object is fed into a CBOR encoder. Otherwise it is interpreted | ||||
as a Python byte string literal. | ||||
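For illustration, a hypothetical frame in this syntax might be::
  1 1 stream-begin command-request new cbor:{b'name': b'heads'}
i.e. request 1 on stream 1, opening the stream, a ``command-request`` frame
with the ``new`` flag, and a CBOR-encoded payload naming the ``heads``
command.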
Gregory Szorc
|
r36545 | """ | ||
opts = pycompat.byteskwargs(opts) | ||||
if opts['localssh'] and not repo: | ||||
raise error.Abort(_('--localssh requires a repository')) | ||||
Gregory Szorc
|
r37501 | if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'): | ||
Gregory Szorc
|
r36545 | raise error.Abort(_('invalid value for --peer'), | ||
hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"')) | ||||
Gregory Szorc
|
r37030 | if path and opts['localssh']: | ||
raise error.Abort(_('cannot specify --localssh with an explicit ' | ||||
'path')) | ||||
Gregory Szorc
|
r36545 | if ui.interactive(): | ||
ui.write(_('(waiting for commands on stdin)\n')) | ||||
blocks = list(_parsewirelangblocks(ui.fin)) | ||||
proc = None | ||||
Gregory Szorc
|
r37030 | stdin = None | ||
stdout = None | ||||
stderr = None | ||||
Gregory Szorc
|
r37031 | opener = None | ||
Gregory Szorc
|
r36545 | |||
if opts['localssh']: | ||||
# We start the SSH server in its own process so there is process | ||||
# separation. This prevents a whole class of potential bugs around | ||||
# shared state from interfering with server operation. | ||||
Yuya Nishihara
|
r37138 | args = procutil.hgcmd() + [ | ||
Gregory Szorc
|
r36545 | '-R', repo.root, | ||
'debugserve', '--sshstdio', | ||||
] | ||||
Matt Harbison
|
r39851 | proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args), | ||
stdin=subprocess.PIPE, | ||||
Gregory Szorc
|
r36545 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, | ||
bufsize=0) | ||||
stdin = proc.stdin | ||||
stdout = proc.stdout | ||||
stderr = proc.stderr | ||||
# We turn the pipes into observers so we can log I/O. | ||||
if ui.verbose or opts['peer'] == 'raw': | ||||
stdin = util.makeloggingfileobject(ui, proc.stdin, b'i', | ||||
logdata=True) | ||||
stdout = util.makeloggingfileobject(ui, proc.stdout, b'o', | ||||
logdata=True) | ||||
stderr = util.makeloggingfileobject(ui, proc.stderr, b'e', | ||||
logdata=True) | ||||
# --localssh also implies the peer connection settings. | ||||
url = 'ssh://localserver' | ||||
Gregory Szorc
|
r36551 | autoreadstderr = not opts['noreadstderr'] | ||
Gregory Szorc
|
r36545 | |||
if opts['peer'] == 'ssh1': | ||||
ui.write(_('creating ssh peer for wire protocol version 1\n')) | ||||
peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr, | ||||
Gregory Szorc
|
r36551 | None, autoreadstderr=autoreadstderr) | ||
Gregory Szorc
|
r36545 | elif opts['peer'] == 'ssh2': | ||
ui.write(_('creating ssh peer for wire protocol version 2\n')) | ||||
peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr, | ||||
Gregory Szorc
|
r36551 | None, autoreadstderr=autoreadstderr) | ||
Gregory Szorc
|
r36545 | elif opts['peer'] == 'raw': | ||
ui.write(_('using raw connection to peer\n')) | ||||
peer = None | ||||
else: | ||||
ui.write(_('creating ssh peer from handshake results\n')) | ||||
Gregory Szorc
|
r36551 | peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr, | ||
autoreadstderr=autoreadstderr) | ||||
Gregory Szorc
|
r36545 | |||
Gregory Szorc
|
r37030 | elif path: | ||
# We bypass hg.peer() so we can proxy the sockets. | ||||
# TODO consider not doing this because we skip | ||||
# ``hg.wirepeersetupfuncs`` and potentially other useful functionality. | ||||
u = util.url(path) | ||||
if u.scheme != 'http': | ||||
raise error.Abort(_('only http:// paths are currently supported')) | ||||
url, authinfo = u.authinfo() | ||||
Gregory Szorc
|
r37501 | openerargs = { | ||
r'useragent': b'Mercurial debugwireproto', | ||||
} | ||||
Gregory Szorc
|
r37030 | |||
# Turn pipes/sockets into observers so we can log I/O. | ||||
if ui.verbose: | ||||
Gregory Szorc
|
r37501 | openerargs.update({ | ||
Gregory Szorc
|
r37030 | r'loggingfh': ui, | ||
r'loggingname': b's', | ||||
r'loggingopts': { | ||||
r'logdata': True, | ||||
Gregory Szorc
|
r37062 | r'logdataapis': False, | ||
Gregory Szorc
|
r37030 | }, | ||
Gregory Szorc
|
r37501 | }) | ||
Gregory Szorc
|
r37030 | |||
Gregory Szorc
|
r37062 | if ui.debugflag: | ||
openerargs[r'loggingopts'][r'logdataapis'] = True | ||||
Gregory Szorc
|
r37063 | # Don't send default headers when in raw mode. This allows us to | ||
# bypass most of the behavior of our URL handling code so we can | ||||
# have near complete control over what's sent on the wire. | ||||
if opts['peer'] == 'raw': | ||||
openerargs[r'sendaccept'] = False | ||||
Gregory Szorc
|
r37030 | opener = urlmod.opener(ui, authinfo, **openerargs) | ||
Gregory Szorc
|
r37501 | if opts['peer'] == 'http2': | ||
ui.write(_('creating http peer for wire protocol version 2\n')) | ||||
Gregory Szorc
|
r37663 | # We go through makepeer() because we need an API descriptor for | ||
# the peer instance to be useful. | ||||
with ui.configoverride({ | ||||
('experimental', 'httppeer.advertise-v2'): True}): | ||||
Gregory Szorc
|
r37736 | if opts['nologhandshake']: | ||
ui.pushbuffer() | ||||
Gregory Szorc
|
r37663 | peer = httppeer.makepeer(ui, path, opener=opener) | ||
Gregory Szorc
|
r37736 | if opts['nologhandshake']: | ||
ui.popbuffer() | ||||
Gregory Szorc
|
r37663 | if not isinstance(peer, httppeer.httpv2peer): | ||
raise error.Abort(_('could not instantiate HTTP peer for ' | ||||
'wire protocol version 2'), | ||||
hint=_('the server may not have the feature ' | ||||
'enabled or is not allowing this ' | ||||
'client version')) | ||||
Gregory Szorc
|
r37501 | elif opts['peer'] == 'raw': | ||
Gregory Szorc
|
r37030 | ui.write(_('using raw connection to peer\n')) | ||
peer = None | ||||
elif opts['peer']: | ||||
raise error.Abort(_('--peer %s not supported with HTTP peers') % | ||||
opts['peer']) | ||||
else: | ||||
Gregory Szorc
|
r37571 | peer = httppeer.makepeer(ui, path, opener=opener) | ||
Gregory Szorc
|
r37030 | |||
# We /could/ populate stdin/stdout with sock.makefile()... | ||||
Gregory Szorc
|
r36545 | else: | ||
Gregory Szorc
|
r37030 | raise error.Abort(_('unsupported connection configuration')) | ||
Gregory Szorc
|
r36545 | |||
Gregory Szorc
|
r36548 | batchedcommands = None | ||
Gregory Szorc
|
r36545 | # Now perform actions based on the parsed wire language instructions. | ||
for action, lines in blocks: | ||||
if action in ('raw', 'raw+'): | ||||
Gregory Szorc
|
r37030 | if not stdin: | ||
raise error.Abort(_('cannot call raw/raw+ on this peer')) | ||||
Gregory Szorc
|
r36545 | # Concatenate the data together. | ||
data = ''.join(l.lstrip() for l in lines) | ||||
Yuya Nishihara
|
r37102 | data = stringutil.unescapestr(data) | ||
Gregory Szorc
|
r36545 | stdin.write(data) | ||
if action == 'raw+': | ||||
stdin.flush() | ||||
elif action == 'flush': | ||||
Gregory Szorc
|
r37030 | if not stdin: | ||
raise error.Abort(_('cannot call flush on this peer')) | ||||
Gregory Szorc
|
r36545 | stdin.flush() | ||
Gregory Szorc
|
r36547 | elif action.startswith('command'): | ||
if not peer: | ||||
raise error.Abort(_('cannot send commands unless peer instance ' | ||||
'is available')) | ||||
command = action.split(' ', 1)[1] | ||||
args = {} | ||||
for line in lines: | ||||
# We need to allow empty values. | ||||
fields = line.lstrip().split(' ', 1) | ||||
if len(fields) == 1: | ||||
key = fields[0] | ||||
value = '' | ||||
else: | ||||
key, value = fields | ||||
Gregory Szorc
|
r37501 | if value.startswith('eval:'): | ||
value = stringutil.evalpythonliteral(value[5:]) | ||||
else: | ||||
value = stringutil.unescapestr(value) | ||||
args[key] = value | ||||
Gregory Szorc
|
r36547 | |||
Gregory Szorc
|
r36548 | if batchedcommands is not None: | ||
batchedcommands.append((command, args)) | ||||
continue | ||||
Gregory Szorc
|
r36547 | ui.status(_('sending %s command\n') % command) | ||
Gregory Szorc
|
r36551 | |||
if 'PUSHFILE' in args: | ||||
with open(args['PUSHFILE'], r'rb') as fh: | ||||
del args['PUSHFILE'] | ||||
Augie Fackler
|
r36605 | res, output = peer._callpush(command, fh, | ||
**pycompat.strkwargs(args)) | ||||
Yuya Nishihara
|
r37338 | ui.status(_('result: %s\n') % stringutil.escapestr(res)) | ||
Gregory Szorc
|
r36551 | ui.status(_('remote output: %s\n') % | ||
Yuya Nishihara
|
r37338 | stringutil.escapestr(output)) | ||
Gregory Szorc
|
r36551 | else: | ||
Gregory Szorc
|
r37670 | with peer.commandexecutor() as e: | ||
res = e.callcommand(command, args).result() | ||||
Gregory Szorc
|
r37738 | if isinstance(res, wireprotov2peer.commandresponse): | ||
Gregory Szorc
|
r39597 | val = res.objects() | ||
Yuya Nishihara
|
r37961 | ui.status(_('response: %s\n') % | ||
Gregory Szorc
|
r39414 | stringutil.pprint(val, bprefix=True, indent=2)) | ||
Gregory Szorc
|
r37738 | else: | ||
Yuya Nishihara
|
r37961 | ui.status(_('response: %s\n') % | ||
Gregory Szorc
|
r39414 | stringutil.pprint(res, bprefix=True, indent=2)) | ||
Gregory Szorc
|
r36547 | |||
Gregory Szorc
|
r36548 | elif action == 'batchbegin': | ||
if batchedcommands is not None: | ||||
raise error.Abort(_('nested batchbegin not allowed')) | ||||
batchedcommands = [] | ||||
elif action == 'batchsubmit': | ||||
# There is a batching API we could go through. But it would be | ||||
# difficult to normalize requests into function calls. It is easier | ||||
# to bypass this layer and normalize to commands + args. | ||||
ui.status(_('sending batch with %d sub-commands\n') % | ||||
len(batchedcommands)) | ||||
for i, chunk in enumerate(peer._submitbatch(batchedcommands)): | ||||
Yuya Nishihara
|
r37102 | ui.status(_('response #%d: %s\n') % | ||
Yuya Nishihara
|
r37338 | (i, stringutil.escapestr(chunk))) | ||
Gregory Szorc
|
r36548 | |||
batchedcommands = None | ||||
Gregory Szorc
|
r37031 | |||
elif action.startswith('httprequest '): | ||||
if not opener: | ||||
raise error.Abort(_('cannot use httprequest without an HTTP ' | ||||
'peer')) | ||||
request = action.split(' ', 2) | ||||
if len(request) != 3: | ||||
raise error.Abort(_('invalid httprequest: expected format is ' | ||||
'"httprequest <method> <path>"')) | ||||
method, httppath = request[1:] | ||||
headers = {} | ||||
body = None | ||||
Gregory Szorc
|
r37069 | frames = [] | ||
Gregory Szorc
|
r37031 | for line in lines: | ||
line = line.lstrip() | ||||
m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line) | ||||
if m: | ||||
Gregory Szorc
|
r39991 | # Headers need to use native strings. | ||
key = pycompat.strurl(m.group(1)) | ||||
value = pycompat.strurl(m.group(2)) | ||||
headers[key] = value | ||||
Gregory Szorc
|
r37031 | continue | ||
if line.startswith(b'BODYFILE '): | ||||
with open(line.split(b' ', 1)[1], 'rb') as fh: | ||||
body = fh.read() | ||||
Gregory Szorc
|
r37069 | elif line.startswith(b'frame '): | ||
frame = wireprotoframing.makeframefromhumanstring( | ||||
line[len(b'frame '):]) | ||||
frames.append(frame) | ||||
Gregory Szorc
|
r37031 | else: | ||
raise error.Abort(_('unknown argument to httprequest: %s') % | ||||
line) | ||||
url = path + httppath | ||||
Gregory Szorc
|
r37069 | |||
if frames: | ||||
body = b''.join(bytes(f) for f in frames) | ||||
Gregory Szorc
|
r37031 | req = urlmod.urlreq.request(pycompat.strurl(url), body, headers) | ||
Gregory Szorc
|
r37065 | # urllib.Request insists on using has_data() as a proxy for | ||
# determining the request method. Override that to use our | ||||
# explicitly requested method. | ||||
Augie Fackler
|
r39099 | req.get_method = lambda: pycompat.sysstr(method) | ||
Gregory Szorc
|
r37065 | |||
Gregory Szorc
|
r37031 | try: | ||
Gregory Szorc
|
r37575 | res = opener.open(req) | ||
body = res.read() | ||||
Gregory Szorc
|
r37031 | except util.urlerr.urlerror as e: | ||
Augie Fackler
|
r39100 | # read() method must be called, but only exists in Python 2 | ||
getattr(e, 'read', lambda: None)() | ||||
Gregory Szorc
|
r37575 | continue | ||
Gregory Szorc
|
r39992 | ct = res.headers.get(r'Content-Type') | ||
if ct == r'application/mercurial-cbor': | ||||
Yuya Nishihara
|
r37961 | ui.write(_('cbor> %s\n') % | ||
Gregory Szorc
|
r40053 | stringutil.pprint(cborutil.decodeall(body), | ||
Gregory Szorc
|
r39480 | bprefix=True, | ||
Gregory Szorc
|
r39414 | indent=2)) | ||
Gregory Szorc
|
r37031 | |||
Gregory Szorc
|
r36545 | elif action == 'close': | ||
peer.close() | ||||
elif action == 'readavailable': | ||||
Gregory Szorc
|
r37030 | if not stdout or not stderr: | ||
raise error.Abort(_('readavailable not available on this peer')) | ||||
Yuya Nishihara
|
r36861 | stdin.close() | ||
stdout.read() | ||||
stderr.read() | ||||
Gregory Szorc
|
r37030 | |||
Gregory Szorc
|
r36545 | elif action == 'readline': | ||
Gregory Szorc
|
r37030 | if not stdout: | ||
raise error.Abort(_('readline not available on this peer')) | ||||
Gregory Szorc
|
r36545 | stdout.readline() | ||
Gregory Szorc
|
r37025 | elif action == 'ereadline': | ||
Gregory Szorc
|
r37030 | if not stderr: | ||
raise error.Abort(_('ereadline not available on this peer')) | ||||
Gregory Szorc
|
r37025 | stderr.readline() | ||
elif action.startswith('read '): | ||||
count = int(action.split(' ', 1)[1]) | ||||
Gregory Szorc
|
r37030 | if not stdout: | ||
raise error.Abort(_('read not available on this peer')) | ||||
Gregory Szorc
|
r37025 | stdout.read(count) | ||
elif action.startswith('eread '): | ||||
count = int(action.split(' ', 1)[1]) | ||||
Gregory Szorc
|
r37030 | if not stderr: | ||
raise error.Abort(_('eread not available on this peer')) | ||||
Gregory Szorc
|
r37025 | stderr.read(count) | ||
Gregory Szorc
|
r36545 | else: | ||
raise error.Abort(_('unknown action: %s') % action) | ||||
Gregory Szorc
|
r36548 | if batchedcommands is not None: | ||
raise error.Abort(_('unclosed "batchbegin" request')) | ||||
Gregory Szorc
|
r36545 | if peer: | ||
peer.close() | ||||
if proc: | ||||
proc.kill() | ||||