debugcommands.py
480 lines
| 13.3 KiB
| text/x-python
|
PythonLexer
Augie Fackler
|
r40530 | # debugcommands.py - debug logic for remotefilelog | ||
# | ||||
# Copyright 2013 Facebook, Inc. | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
import os | ||||
Augie Fackler
|
r40542 | import zlib | ||
Augie Fackler
|
r40530 | |||
Joerg Sonnenberger
|
r46729 | from mercurial.node import ( | ||
bin, | ||||
hex, | ||||
Joerg Sonnenberger
|
r47771 | sha1nodeconstants, | ||
Joerg Sonnenberger
|
r46729 | short, | ||
) | ||||
Augie Fackler
|
r40530 | from mercurial.i18n import _ | ||
Gregory Szorc
|
r43355 | from mercurial.pycompat import open | ||
Augie Fackler
|
r40530 | from mercurial import ( | ||
error, | ||||
filelog, | ||||
Boris Feld
|
r43213 | lock as lockmod, | ||
Kyle Lippincott
|
r41973 | pycompat, | ||
Augie Fackler
|
r40530 | revlog, | ||
) | ||||
Augie Fackler
|
r44519 | from mercurial.utils import hashutil | ||
Augie Fackler
|
r40530 | from . import ( | ||
constants, | ||||
datapack, | ||||
fileserverclient, | ||||
historypack, | ||||
repack, | ||||
shallowutil, | ||||
) | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def debugremotefilelog(ui, path, **opts): | ||
Augie Fackler
|
r43906 | decompress = opts.get('decompress') | ||
Augie Fackler
|
r40530 | |||
size, firstnode, mapping = parsefileblob(path, decompress) | ||||
Augie Fackler
|
r43347 | ui.status(_(b"size: %d bytes\n") % size) | ||
ui.status(_(b"path: %s \n") % path) | ||||
ui.status(_(b"key: %s \n") % (short(firstnode))) | ||||
ui.status(_(b"\n")) | ||||
Augie Fackler
|
r43346 | ui.status( | ||
Augie Fackler
|
r43347 | _(b"%12s => %12s %13s %13s %12s\n") | ||
% (b"node", b"p1", b"p2", b"linknode", b"copyfrom") | ||||
Augie Fackler
|
r43346 | ) | ||
Augie Fackler
|
r40530 | |||
queue = [firstnode] | ||||
while queue: | ||||
node = queue.pop(0) | ||||
p1, p2, linknode, copyfrom = mapping[node] | ||||
Augie Fackler
|
r43346 | ui.status( | ||
Augie Fackler
|
r43347 | _(b"%s => %s %s %s %s\n") | ||
Augie Fackler
|
r43346 | % (short(node), short(p1), short(p2), short(linknode), copyfrom) | ||
) | ||||
Joerg Sonnenberger
|
r47771 | if p1 != sha1nodeconstants.nullid: | ||
Augie Fackler
|
r40530 | queue.append(p1) | ||
Joerg Sonnenberger
|
r47771 | if p2 != sha1nodeconstants.nullid: | ||
Augie Fackler
|
r40530 | queue.append(p2) | ||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def buildtemprevlog(repo, file): | ||
# get filename key | ||||
Joerg Sonnenberger
|
r46729 | filekey = hex(hashutil.sha1(file).digest()) | ||
Augie Fackler
|
r43347 | filedir = os.path.join(repo.path, b'store/data', filekey) | ||
Augie Fackler
|
r40530 | |||
# sort all entries based on linkrev | ||||
fctxs = [] | ||||
for filenode in os.listdir(filedir): | ||||
Augie Fackler
|
r43347 | if b'_old' not in filenode: | ||
Augie Fackler
|
r40530 | fctxs.append(repo.filectx(file, fileid=bin(filenode))) | ||
fctxs = sorted(fctxs, key=lambda x: x.linkrev()) | ||||
# add to revlog | ||||
Augie Fackler
|
r43347 | temppath = repo.sjoin(b'data/temprevlog.i') | ||
Augie Fackler
|
r40530 | if os.path.exists(temppath): | ||
os.remove(temppath) | ||||
Augie Fackler
|
r43347 | r = filelog.filelog(repo.svfs, b'temprevlog') | ||
Augie Fackler
|
r40530 | |||
class faket(object): | ||||
def add(self, a, b, c): | ||||
pass | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | t = faket() | ||
for fctx in fctxs: | ||||
if fctx.node() not in repo: | ||||
continue | ||||
p = fctx.filelog().parents(fctx.filenode()) | ||||
meta = {} | ||||
if fctx.renamed(): | ||||
Augie Fackler
|
r43347 | meta[b'copy'] = fctx.renamed()[0] | ||
meta[b'copyrev'] = hex(fctx.renamed()[1]) | ||||
Augie Fackler
|
r40530 | |||
r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1]) | ||||
return r | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def debugindex(orig, ui, repo, file_=None, **opts): | ||
"""dump the contents of an index file""" | ||||
Augie Fackler
|
r43346 | if ( | ||
Augie Fackler
|
r43906 | opts.get('changelog') | ||
or opts.get('manifest') | ||||
or opts.get('dir') | ||||
Augie Fackler
|
r43346 | or not shallowutil.isenabled(repo) | ||
or not repo.shallowmatch(file_) | ||||
): | ||||
Augie Fackler
|
r40530 | return orig(ui, repo, file_, **opts) | ||
r = buildtemprevlog(repo, file_) | ||||
# debugindex like normal | ||||
Augie Fackler
|
r43347 | format = opts.get(b'format', 0) | ||
Augie Fackler
|
r40530 | if format not in (0, 1): | ||
Augie Fackler
|
r43347 | raise error.Abort(_(b"unknown format %d") % format) | ||
Augie Fackler
|
r40530 | |||
generaldelta = r.version & revlog.FLAG_GENERALDELTA | ||||
if generaldelta: | ||||
Augie Fackler
|
r43347 | basehdr = b' delta' | ||
Augie Fackler
|
r40530 | else: | ||
Augie Fackler
|
r43347 | basehdr = b' base' | ||
Augie Fackler
|
r40530 | |||
if format == 0: | ||||
Augie Fackler
|
r43346 | ui.write( | ||
( | ||||
Augie Fackler
|
r43347 | b" rev offset length " + basehdr + b" linkrev" | ||
b" nodeid p1 p2\n" | ||||
Augie Fackler
|
r43346 | ) | ||
) | ||||
Augie Fackler
|
r40530 | elif format == 1: | ||
Augie Fackler
|
r43346 | ui.write( | ||
( | ||||
Augie Fackler
|
r43347 | b" rev flag offset length" | ||
b" size " + basehdr + b" link p1 p2" | ||||
b" nodeid\n" | ||||
Augie Fackler
|
r43346 | ) | ||
) | ||||
Augie Fackler
|
r40530 | |||
for i in r: | ||||
node = r.node(i) | ||||
if generaldelta: | ||||
base = r.deltaparent(i) | ||||
else: | ||||
base = r.chainbase(i) | ||||
if format == 0: | ||||
try: | ||||
pp = r.parents(node) | ||||
except Exception: | ||||
Joerg Sonnenberger
|
r47771 | pp = [repo.nullid, repo.nullid] | ||
Augie Fackler
|
r43346 | ui.write( | ||
Augie Fackler
|
r43347 | b"% 6d % 9d % 7d % 6d % 7d %s %s %s\n" | ||
Augie Fackler
|
r43346 | % ( | ||
i, | ||||
r.start(i), | ||||
r.length(i), | ||||
base, | ||||
r.linkrev(i), | ||||
short(node), | ||||
short(pp[0]), | ||||
short(pp[1]), | ||||
) | ||||
) | ||||
Augie Fackler
|
r40530 | elif format == 1: | ||
pr = r.parentrevs(i) | ||||
Augie Fackler
|
r43346 | ui.write( | ||
Augie Fackler
|
r43347 | b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" | ||
Augie Fackler
|
r43346 | % ( | ||
i, | ||||
r.flags(i), | ||||
r.start(i), | ||||
r.length(i), | ||||
r.rawsize(i), | ||||
base, | ||||
r.linkrev(i), | ||||
pr[0], | ||||
pr[1], | ||||
short(node), | ||||
) | ||||
) | ||||
Augie Fackler
|
def debugindexdot(orig, ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    if not shallowutil.isenabled(repo):
        return orig(ui, repo, file_)

    # Drop the two-character store suffix from the basename to recover the
    # tracked filename.
    r = buildtemprevlog(repo, os.path.basename(file_)[:-2])

    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        node = r.node(rev)
        parents = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write(b"}\n")
r40530 | def verifyremotefilelog(ui, path, **opts): | ||
Augie Fackler
|
r43906 | decompress = opts.get('decompress') | ||
Augie Fackler
|
r40530 | |||
for root, dirs, files in os.walk(path): | ||||
for file in files: | ||||
Augie Fackler
|
r43347 | if file == b"repos": | ||
Augie Fackler
|
r40530 | continue | ||
filepath = os.path.join(root, file) | ||||
size, firstnode, mapping = parsefileblob(filepath, decompress) | ||||
Gregory Szorc
|
r43374 | for p1, p2, linknode, copyfrom in pycompat.itervalues(mapping): | ||
Joerg Sonnenberger
|
r47771 | if linknode == sha1nodeconstants.nullid: | ||
Augie Fackler
|
r40530 | actualpath = os.path.relpath(root, path) | ||
Augie Fackler
|
r43346 | key = fileserverclient.getcachekey( | ||
Augie Fackler
|
r43347 | b"reponame", actualpath, file | ||
Augie Fackler
|
r43346 | ) | ||
ui.status( | ||||
Augie Fackler
|
r43347 | b"%s %s\n" % (key, os.path.relpath(filepath, path)) | ||
Augie Fackler
|
r43346 | ) | ||
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r40542 | def _decompressblob(raw): | ||
return zlib.decompress(raw) | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def parsefileblob(path, decompress): | ||
Augie Fackler
|
r43347 | f = open(path, b"rb") | ||
Augie Fackler
|
r40530 | try: | ||
raw = f.read() | ||||
finally: | ||||
f.close() | ||||
if decompress: | ||||
Augie Fackler
|
r40542 | raw = _decompressblob(raw) | ||
Augie Fackler
|
r40530 | |||
offset, size, flags = shallowutil.parsesizeflags(raw) | ||||
start = offset + size | ||||
firstnode = None | ||||
mapping = {} | ||||
while start < len(raw): | ||||
Augie Fackler
|
r43347 | divider = raw.index(b'\0', start + 80) | ||
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | currentnode = raw[start : (start + 20)] | ||
Augie Fackler
|
r40530 | if not firstnode: | ||
firstnode = currentnode | ||||
Augie Fackler
|
r43346 | p1 = raw[(start + 20) : (start + 40)] | ||
p2 = raw[(start + 40) : (start + 60)] | ||||
linknode = raw[(start + 60) : (start + 80)] | ||||
copyfrom = raw[(start + 80) : divider] | ||||
Augie Fackler
|
r40530 | |||
mapping[currentnode] = (p1, p2, linknode, copyfrom) | ||||
start = divider + 1 | ||||
return size, firstnode, mapping | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def debugdatapack(ui, *paths, **opts): | ||
for path in paths: | ||||
Augie Fackler
|
r43347 | if b'.data' in path: | ||
path = path[: path.index(b'.data')] | ||||
ui.write(b"%s:\n" % path) | ||||
Augie Fackler
|
r40530 | dpack = datapack.datapack(path) | ||
Augie Fackler
|
r43906 | node = opts.get('node') | ||
Augie Fackler
|
r40530 | if node: | ||
Augie Fackler
|
r43347 | deltachain = dpack.getdeltachain(b'', bin(node)) | ||
Augie Fackler
|
r40530 | dumpdeltachain(ui, deltachain, **opts) | ||
return | ||||
Augie Fackler
|
r43906 | if opts.get('long'): | ||
Augie Fackler
|
r40530 | hashformatter = hex | ||
hashlen = 42 | ||||
else: | ||||
hashformatter = short | ||||
hashlen = 14 | ||||
lastfilename = None | ||||
totaldeltasize = 0 | ||||
totalblobsize = 0 | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def printtotals(): | ||
if lastfilename is not None: | ||||
Augie Fackler
|
r43347 | ui.write(b"\n") | ||
Augie Fackler
|
r40530 | if not totaldeltasize or not totalblobsize: | ||
return | ||||
difference = totalblobsize - totaldeltasize | ||||
Augie Fackler
|
r43347 | deltastr = b"%0.1f%% %s" % ( | ||
Augie Fackler
|
r40530 | (100.0 * abs(difference) / totalblobsize), | ||
Augie Fackler
|
r43347 | (b"smaller" if difference > 0 else b"bigger"), | ||
Augie Fackler
|
r43346 | ) | ||
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43350 | ui.writenoi18n( | ||
Augie Fackler
|
r43347 | b"Total:%s%s %s (%s)\n" | ||
Augie Fackler
|
r43346 | % ( | ||
Augie Fackler
|
r43347 | b"".ljust(2 * hashlen - len(b"Total:")), | ||
(b'%d' % totaldeltasize).ljust(12), | ||||
(b'%d' % totalblobsize).ljust(9), | ||||
Augie Fackler
|
r43346 | deltastr, | ||
) | ||||
) | ||||
Augie Fackler
|
r40530 | |||
bases = {} | ||||
nodes = set() | ||||
failures = 0 | ||||
for filename, node, deltabase, deltalen in dpack.iterentries(): | ||||
bases[node] = deltabase | ||||
if node in nodes: | ||||
Augie Fackler
|
r43347 | ui.write((b"Bad entry: %s appears twice\n" % short(node))) | ||
Augie Fackler
|
r40530 | failures += 1 | ||
nodes.add(node) | ||||
if filename != lastfilename: | ||||
printtotals() | ||||
Augie Fackler
|
r43347 | name = b'(empty name)' if filename == b'' else filename | ||
ui.write(b"%s:\n" % name) | ||||
Augie Fackler
|
r43346 | ui.write( | ||
Augie Fackler
|
r43347 | b"%s%s%s%s\n" | ||
Augie Fackler
|
r43346 | % ( | ||
Augie Fackler
|
r43347 | b"Node".ljust(hashlen), | ||
b"Delta Base".ljust(hashlen), | ||||
b"Delta Length".ljust(14), | ||||
b"Blob Size".ljust(9), | ||||
Augie Fackler
|
r43346 | ) | ||
) | ||||
Augie Fackler
|
r40530 | lastfilename = filename | ||
totalblobsize = 0 | ||||
totaldeltasize = 0 | ||||
# Metadata could be missing, in which case it will be an empty dict. | ||||
meta = dpack.getmeta(filename, node) | ||||
if constants.METAKEYSIZE in meta: | ||||
blobsize = meta[constants.METAKEYSIZE] | ||||
totaldeltasize += deltalen | ||||
totalblobsize += blobsize | ||||
else: | ||||
Augie Fackler
|
r43347 | blobsize = b"(missing)" | ||
Augie Fackler
|
r43346 | ui.write( | ||
Augie Fackler
|
r43347 | b"%s %s %s%s\n" | ||
Augie Fackler
|
r43346 | % ( | ||
hashformatter(node), | ||||
hashformatter(deltabase), | ||||
Augie Fackler
|
r43347 | (b'%d' % deltalen).ljust(14), | ||
Augie Fackler
|
r43346 | pycompat.bytestr(blobsize), | ||
) | ||||
) | ||||
Augie Fackler
|
r40530 | |||
if filename is not None: | ||||
printtotals() | ||||
failures += _sanitycheck(ui, set(nodes), bases) | ||||
if failures > 1: | ||||
Augie Fackler
|
r43347 | ui.warn((b"%d failures\n" % failures)) | ||
Augie Fackler
|
r40530 | return 1 | ||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def _sanitycheck(ui, nodes, bases): | ||
""" | ||||
Does some basic sanity checking on a packfiles with ``nodes`` ``bases`` (a | ||||
mapping of node->base): | ||||
- Each deltabase must itself be a node elsewhere in the pack | ||||
- There must be no cycles | ||||
""" | ||||
failures = 0 | ||||
for node in nodes: | ||||
seen = set() | ||||
current = node | ||||
deltabase = bases[current] | ||||
Joerg Sonnenberger
|
r47771 | while deltabase != sha1nodeconstants.nullid: | ||
Augie Fackler
|
r40530 | if deltabase not in nodes: | ||
Augie Fackler
|
r43346 | ui.warn( | ||
( | ||||
Augie Fackler
|
r43347 | b"Bad entry: %s has an unknown deltabase (%s)\n" | ||
Augie Fackler
|
r43346 | % (short(node), short(deltabase)) | ||
) | ||||
) | ||||
Augie Fackler
|
r40530 | failures += 1 | ||
break | ||||
if deltabase in seen: | ||||
Augie Fackler
|
r43346 | ui.warn( | ||
( | ||||
Augie Fackler
|
r43347 | b"Bad entry: %s has a cycle (at %s)\n" | ||
Augie Fackler
|
r43346 | % (short(node), short(deltabase)) | ||
) | ||||
) | ||||
Augie Fackler
|
r40530 | failures += 1 | ||
break | ||||
current = deltabase | ||||
seen.add(current) | ||||
deltabase = bases[current] | ||||
# Since ``node`` begins a valid chain, reset/memoize its base to nullid | ||||
# so we don't traverse it again. | ||||
Joerg Sonnenberger
|
r47771 | bases[node] = sha1nodeconstants.nullid | ||
Augie Fackler
|
r40530 | return failures | ||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def dumpdeltachain(ui, deltachain, **opts): | ||
hashformatter = hex | ||||
hashlen = 40 | ||||
lastfilename = None | ||||
for filename, node, filename, deltabasenode, delta in deltachain: | ||||
if filename != lastfilename: | ||||
Augie Fackler
|
r43347 | ui.write(b"\n%s\n" % filename) | ||
Augie Fackler
|
r40530 | lastfilename = filename | ||
Augie Fackler
|
r43346 | ui.write( | ||
Augie Fackler
|
r43347 | b"%s %s %s %s\n" | ||
Augie Fackler
|
r43346 | % ( | ||
Augie Fackler
|
r43347 | b"Node".ljust(hashlen), | ||
b"Delta Base".ljust(hashlen), | ||||
b"Delta SHA1".ljust(hashlen), | ||||
b"Delta Length".ljust(6), | ||||
Augie Fackler
|
r43346 | ) | ||
) | ||||
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | ui.write( | ||
Augie Fackler
|
r43347 | b"%s %s %s %d\n" | ||
Augie Fackler
|
r43346 | % ( | ||
hashformatter(node), | ||||
hashformatter(deltabasenode), | ||||
Joerg Sonnenberger
|
r46729 | hex(hashutil.sha1(delta).digest()), | ||
Augie Fackler
|
r43346 | len(delta), | ||
) | ||||
) | ||||
Augie Fackler
|
r40530 | |||
def debughistorypack(ui, path):
    """Print every entry of a historypack file, grouped by filename."""
    if b'.hist' in path:
        path = path[: path.index(b'.hist')]
    hpack = historypack.historypack(path)

    lastfilename = None
    for entry in hpack.iterentries():
        filename, node, p1node, p2node, linknode, copyfrom = entry
        if filename != lastfilename:
            # New file section: print its name and the column headers.
            ui.write(b"\n%s\n" % filename)
            header = b"%s%s%s%s%s\n" % (
                b"Node".ljust(14),
                b"P1 Node".ljust(14),
                b"P2 Node".ljust(14),
                b"Link Node".ljust(14),
                b"Copy From",
            )
            ui.write(header)
            lastfilename = filename
        ui.write(
            b"%s %s %s %s %s\n"
            % (
                short(node),
                short(p1node),
                short(p2node),
                short(linknode),
                copyfrom,
            )
        )
def debugwaitonrepack(repo): | ||||
Augie Fackler
|
r43347 | with lockmod.lock(repack.repacklockvfs(repo), b"repacklock", timeout=-1): | ||
Augie Fackler
|
r40530 | return | ||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def debugwaitonprefetch(repo): | ||
Augie Fackler
|
r43346 | with repo._lock( | ||
repo.svfs, | ||||
Augie Fackler
|
r43347 | b"prefetchlock", | ||
Augie Fackler
|
r43346 | True, | ||
None, | ||||
None, | ||||
Augie Fackler
|
r43347 | _(b'prefetching in %s') % repo.origroot, | ||
Augie Fackler
|
r43346 | ): | ||
Augie Fackler
|
r40530 | pass | ||