#!/usr/bin/env python

import hashlib
import os
import random
import shutil
import stat
import struct
import sys
import tempfile
import time
import unittest

import silenttestrunner

# Load the local remotefilelog, not the system one
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]

from mercurial.node import sha1nodeconstants
from mercurial import policy

if not policy._packageprefs.get(policy.policy, (False, False))[1]:
    if __name__ == '__main__':
        msg = "skipped: pure module not available with module policy:"
        print(msg, policy.policy, file=sys.stderr)
        sys.exit(80)
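
# An exit status of 80 is the "skipped" convention understood by Mercurial's
# test harness; the Windows check at the bottom of this file uses it too.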

from mercurial import (
    pycompat,
    ui as uimod,
)
from hgext.remotefilelog import (
    basepack,
    constants,
    datapack,
)


class datapacktestsbase:
    def __init__(self, datapackreader, paramsavailable):
        self.datapackreader = datapackreader
        self.paramsavailable = paramsavailable

    def setUp(self):
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        tempdir = pycompat.bytestr(tempfile.mkdtemp())
        self.tempdirs.append(tempdir)
        return tempdir

    def getHash(self, content):
        return hashlib.sha1(content).digest()

    def getFakeHash(self):
        return b''.join(
            pycompat.bytechr(random.randint(0, 255)) for _ in range(20)
        )
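
    # A fake hash is 20 random bytes -- the width of a sha1 digest -- so it
    # looks like a valid node id without matching any real content.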

    def createPack(self, revisions=None, packdir=None):
        if revisions is None:
            revisions = [
                (
                    b"filename",
                    self.getFakeHash(),
                    sha1nodeconstants.nullid,
                    b"content",
                )
            ]

        if packdir is None:
            packdir = self.makeTempDir()

        packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

        for args in revisions:
            filename, node, base, content = args[0:4]
            # meta is optional
            meta = None
            if len(args) > 4:
                meta = args[4]
            packer.add(filename, node, base, content, metadata=meta)

        path = packer.close()
        return self.datapackreader(path)
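
    # A minimal createPack() call, for reference (values are illustrative):
    #
    #   node = self.getHash(b'x')
    #   pack = self.createPack(
    #       [(b'a.txt', node, sha1nodeconstants.nullid, b'x')]
    #   )
    #
    # Each revision tuple is (filename, node, deltabase node, content) with
    # an optional trailing metadata dict, as the unpacking above shows.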

    def _testAddSingle(self, content):
        """Test putting a simple blob into a pack and reading it out."""
        filename = b"foo"
        node = self.getHash(content)

        revisions = [(filename, node, sha1nodeconstants.nullid, content)]
        pack = self.createPack(revisions)

        if self.paramsavailable:
            self.assertEqual(
                pack.params.fanoutprefix, basepack.SMALLFANOUTPREFIX
            )

        chain = pack.getdeltachain(filename, node)
        self.assertEqual(content, chain[0][4])
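
    # Note: as the assertions in this file show, getdeltachain() entries are
    # 5-tuples where entry[0] is the filename, entry[1] the node, entry[3]
    # the deltabase node, and entry[4] the delta text (the full content for
    # a root entry such as this one).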

    def testAddSingle(self):
        self._testAddSingle(b'abcdef')

    def testAddSingleEmpty(self):
        self._testAddSingle(b'')

    def testAddMultiple(self):
        """Test putting multiple unrelated blobs into a pack and reading them
        out.
        """
        revisions = []
        for i in range(10):
            filename = b"foo%d" % i
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, self.getFakeHash(), content))

        pack = self.createPack(revisions)

        for filename, node, base, content in revisions:
            entry = pack.getdelta(filename, node)
            self.assertEqual((content, filename, base, {}), entry)

            chain = pack.getdeltachain(filename, node)
            self.assertEqual(content, chain[0][4])
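
    # The assertion above doubles as documentation for getdelta(): it returns
    # a (delta, deltabasename, deltabasenode, metadata) tuple, with an empty
    # dict when no metadata was stored.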

    def testAddDeltas(self):
        """Test putting multiple delta blobs into a pack and reading the
        chain back."""
        revisions = []
        filename = b"foo"
        lastnode = sha1nodeconstants.nullid
        for i in range(10):
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        entry = pack.getdelta(filename, revisions[0][1])
        realvalue = (revisions[0][3], filename, revisions[0][2], {})
        self.assertEqual(entry, realvalue)

        # Test that the chain for the final entry has all the others
        chain = pack.getdeltachain(filename, node)
        for i in range(10):
            content = b"abcdef%d" % i
            self.assertEqual(content, chain[-i - 1][4])

    def testPackMany(self):
        """Pack many related and unrelated objects."""
        # Build a random pack file
        revisions = []
        blobs = {}
        random.seed(0)
        for i in range(100):
            filename = b"filename-%d" % i
            filerevs = []
            for j in range(random.randint(1, 100)):
                content = b"content-%d" % j
                node = self.getHash(content)
                lastnode = sha1nodeconstants.nullid
                if len(filerevs) > 0:
                    lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
                filerevs.append(node)
                blobs[(filename, node, lastnode)] = content
                revisions.append((filename, node, lastnode, content))

        pack = self.createPack(revisions)

        # Verify the pack contents
        for (filename, node, lastnode), content in sorted(blobs.items()):
            chain = pack.getdeltachain(filename, node)
            for entry in chain:
                expectedcontent = blobs[(entry[0], entry[1], entry[3])]
                self.assertEqual(entry[4], expectedcontent)

    def testPackMetadata(self):
        revisions = []
        for i in range(100):
            filename = b'%d.txt' % i
            content = b'put-something-here \n' * i
            node = self.getHash(content)
            meta = {
                constants.METAKEYFLAG: i**4,
                constants.METAKEYSIZE: len(content),
                b'Z': b'random_string',
                b'_': b'\0' * i,
            }
            revisions.append(
                (filename, node, sha1nodeconstants.nullid, content, meta)
            )
        pack = self.createPack(revisions)
        for name, node, x, content, origmeta in revisions:
            parsedmeta = pack.getmeta(name, node)
            # flag == 0 should be optimized out
            if origmeta[constants.METAKEYFLAG] == 0:
                del origmeta[constants.METAKEYFLAG]
            self.assertEqual(parsedmeta, origmeta)
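
    # Besides the standard METAKEYFLAG/METAKEYSIZE keys, the b'Z' and b'_'
    # entries above exercise arbitrary single-byte metadata keys and binary
    # values.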

    def testGetMissing(self):
        """Test the getmissing() api."""
        revisions = []
        filename = b"foo"
        lastnode = sha1nodeconstants.nullid
        for i in range(10):
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        missing = pack.getmissing([(b"foo", revisions[0][1])])
        self.assertFalse(missing)

        missing = pack.getmissing(
            [(b"foo", revisions[0][1]), (b"foo", revisions[1][1])]
        )
        self.assertFalse(missing)

        fakenode = self.getFakeHash()
        missing = pack.getmissing(
            [(b"foo", revisions[0][1]), (b"foo", fakenode)]
        )
        self.assertEqual(missing, [(b"foo", fakenode)])
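
    # getmissing() acts as a filter: it returns exactly the (filename, node)
    # pairs from its input that the pack cannot serve, and an empty list when
    # everything is present.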

    def testAddThrows(self):
        # The pack returned by createPack() is a finalized reader, so any
        # further add() call must be rejected.
        pack = self.createPack()

        try:
            pack.add(b'filename', sha1nodeconstants.nullid, b'contents')
            self.assertTrue(False, "datapack.add should throw")
        except RuntimeError:
            pass

    def testBadVersionThrows(self):
        pack = self.createPack()
        path = pack.path + b'.datapack'
        with open(path, 'rb') as f:
            raw = f.read()
        # The first byte of a datapack file holds its version; overwrite it
        # with 255, which no reader supports.
        raw = struct.pack('!B', 255) + raw[1:]
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        with open(path, 'wb+') as f:
            f.write(raw)

        try:
            self.datapackreader(pack.path)
            self.assertTrue(False, "bad version number should have thrown")
        except RuntimeError:
            pass

    def testMissingDeltabase(self):
        fakenode = self.getFakeHash()
        revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")]
        pack = self.createPack(revisions)
        chain = pack.getdeltachain(b"filename", fakenode)
        # The deltabase is not in the pack, so the chain stops after the one
        # entry we do have.
        self.assertEqual(len(chain), 1)

    def testLargePack(self):
        """Test creating and reading from a large pack with over
        SMALLFANOUTCUTOFF entries, which forces it to use a 2^16 fanout
        table instead of the small one."""
        revisions = []
        blobs = {}
        total = basepack.SMALLFANOUTCUTOFF + 1
        for i in range(total):
            filename = b"filename-%d" % i
            content = filename
            node = self.getHash(content)
            blobs[(filename, node)] = content
            revisions.append(
                (filename, node, sha1nodeconstants.nullid, content)
            )

        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEqual(
                pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX
            )

        for (filename, node), content in blobs.items():
            actualcontent = pack.getdeltachain(filename, node)[0][4]
            self.assertEqual(actualcontent, content)
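
    # The fanout prefix names presumably refer to how many leading node bytes
    # index the fanout table: crossing SMALLFANOUTCUTOFF switches the pack
    # from the small table to the 2^16-entry one checked above.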

    def testPacksCache(self):
        """Test that we remember the most recent packs while fetching the
        delta chain."""

        packdir = self.makeTempDir()
        deltachains = []

        numpacks = 10
        revisionsperpack = 100

        for i in range(numpacks):
            chain = []
            revision = (
                b'%d' % i,
                self.getFakeHash(),
                sha1nodeconstants.nullid,
                b"content",
            )

            for _ in range(revisionsperpack):
                chain.append(revision)
                revision = (
                    b'%d' % i,
                    self.getFakeHash(),
                    revision[1],
                    self.getFakeHash(),
                )

            self.createPack(chain, packdir)
            deltachains.append(chain)

        class testdatapackstore(datapack.datapackstore):
            # Ensures that we are not keeping everything in the cache.
            DEFAULTCACHESIZE = numpacks // 2

        store = testdatapackstore(uimod.ui(), packdir)

        random.shuffle(deltachains)
        for randomchain in deltachains:
            revision = random.choice(randomchain)
            chain = store.getdeltachain(revision[0], revision[1])

            mostrecentpack = next(iter(store.packs), None)
            self.assertEqual(
                mostrecentpack.getdeltachain(revision[0], revision[1]), chain
            )

            self.assertEqual(randomchain.index(revision) + 1, len(chain))
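
    # Two properties are checked per lookup: the pack that served the request
    # must be the most recently used entry in the store's pack list, and a
    # revision at index i of its chain must produce a delta chain of i + 1
    # entries, since each revision deltas against its predecessor.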

    # perf test off by default since it's slow
    def _testIndexPerf(self):
        random.seed(0)
        print("Multi-get perf test")
        packsizes = [
            100,
            10000,
            100000,
            500000,
            1000000,
            3000000,
        ]
        lookupsizes = [
            10,
            100,
            1000,
            10000,
            100000,
            1000000,
        ]
        for packsize in packsizes:
            revisions = []
            for i in range(packsize):
                filename = b"filename-%d" % i
                content = b"content-%d" % i
                node = self.getHash(content)
                revisions.append(
                    (filename, node, sha1nodeconstants.nullid, content)
                )

            path = self.createPack(revisions).path

            # Perf of large multi-get
            import gc

            gc.disable()
            pack = self.datapackreader(path)
            for lookupsize in lookupsizes:
                if lookupsize > packsize:
                    continue
                random.shuffle(revisions)
                findnodes = [(rev[0], rev[1]) for rev in revisions]

                start = time.time()
                pack.getmissing(findnodes[:lookupsize])
                elapsed = time.time() - start
                # Both counts are pre-formatted, right-justified strings, so
                # format them with %s.
                print(
                    "%s pack %s lookups = %0.04f"
                    % (
                        ('%d' % packsize).rjust(7),
                        ('%d' % lookupsize).rjust(7),
                        elapsed,
                    )
                )

            print("")
            gc.enable()

        # The perf test is meant to produce output, so we always fail the test
        # so the user sees the output.
        raise RuntimeError("perf test always fails")


class datapacktests(datapacktestsbase, unittest.TestCase):
    def __init__(self, *args, **kwargs):
        datapacktestsbase.__init__(self, datapack.datapack, True)
        unittest.TestCase.__init__(self, *args, **kwargs)


# TODO:
# datapack store:
# - getmissing
# - GC two packs into one

if __name__ == '__main__':
    if pycompat.iswindows:
        sys.exit(80)  # Skip on Windows
    silenttestrunner.main(__name__)