#!/usr/bin/env python3
from __future__ import absolute_import

import hashlib
import os
import random
import shutil
import stat
import struct
import sys
import tempfile
import unittest

import silenttestrunner

from mercurial.node import sha1nodeconstants
from mercurial import (
    pycompat,
    ui as uimod,
)

# Load the local remotefilelog, not the system one
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
from hgext.remotefilelog import (
    basepack,
    historypack,
)


class histpacktests(unittest.TestCase):
    def setUp(self):
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        tempdir = tempfile.mkdtemp()
        self.tempdirs.append(tempdir)
        return pycompat.fsencode(tempdir)

    def getHash(self, content):
        return hashlib.sha1(content).digest()
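
    # getFakeHash returns 20 random bytes: the length of a sha1 digest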
    def getFakeHash(self):
        return b''.join(
            pycompat.bytechr(random.randint(0, 255)) for _ in range(20)
        )

    def createPack(self, revisions=None):
        """Creates and returns a historypack containing the specified revisions.

        `revisions` is a list of tuples, where each tuple contains a filename,
        node, p1node, p2node, linknode, and copyfrom.
        """
        if revisions is None:
            revisions = [
                (
                    b"filename",
                    self.getFakeHash(),
                    sha1nodeconstants.nullid,
                    sha1nodeconstants.nullid,
                    self.getFakeHash(),
                    None,
                )
            ]

        packdir = pycompat.fsencode(self.makeTempDir())
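        # all of these tests exercise version 2 of the histpack format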
        packer = historypack.mutablehistorypack(uimod.ui(), packdir, version=2)

        for filename, node, p1, p2, linknode, copyfrom in revisions:
            packer.add(filename, node, p1, p2, linknode, copyfrom)

        path = packer.close()
        return historypack.historypack(path)

    def testAddSingle(self):
        """Test putting a single entry into a pack and reading it out."""
        filename = b"foo"
        node = self.getFakeHash()
        p1 = self.getFakeHash()
        p2 = self.getFakeHash()
        linknode = self.getFakeHash()

        revisions = [(filename, node, p1, p2, linknode, None)]
        pack = self.createPack(revisions)
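
        # getancestors() returns a dict of node -> (p1, p2, linknode, copyfrom)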
        actual = pack.getancestors(filename, node)[node]
        self.assertEqual(p1, actual[0])
        self.assertEqual(p2, actual[1])
        self.assertEqual(linknode, actual[2])

    def testAddMultiple(self):
        """Test putting multiple unrelated revisions into a pack and reading
        them out.
        """
        revisions = []
        for i in range(10):
            filename = b"foo-%d" % i
            node = self.getFakeHash()
            p1 = self.getFakeHash()
            p2 = self.getFakeHash()
            linknode = self.getFakeHash()
            revisions.append((filename, node, p1, p2, linknode, None))

        pack = self.createPack(revisions)

        for filename, node, p1, p2, linknode, copyfrom in revisions:
            actual = pack.getancestors(filename, node)[node]
            self.assertEqual(p1, actual[0])
            self.assertEqual(p2, actual[1])
            self.assertEqual(linknode, actual[2])
            self.assertEqual(copyfrom, actual[3])

    def testAddAncestorChain(self):
        """Test putting multiple revisions into a pack and reading the
        ancestor chain.
        """
        revisions = []
        filename = b"foo"
        lastnode = sha1nodeconstants.nullid
        for i in range(10):
            node = self.getFakeHash()
            revisions.append(
                (
                    filename,
                    node,
                    lastnode,
                    sha1nodeconstants.nullid,
                    sha1nodeconstants.nullid,
                    None,
                )
            )
            lastnode = node

        # revisions must be added in topological order, newest first
        revisions = list(reversed(revisions))
        pack = self.createPack(revisions)

        # Test that the chain has all the entries
        ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
        for filename, node, p1, p2, linknode, copyfrom in revisions:
            ap1, ap2, alinknode, acopyfrom = ancestors[node]
            self.assertEqual(ap1, p1)
            self.assertEqual(ap2, p2)
            self.assertEqual(alinknode, linknode)
            self.assertEqual(acopyfrom, copyfrom)

    def testPackMany(self):
        """Pack many related and unrelated ancestors."""
        # Build a random pack file
        allentries = {}
        ancestorcounts = {}
        revisions = []
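        # seed the RNG so the generated pack is the same on every run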
        random.seed(0)
        for i in range(100):
            filename = b"filename-%d" % i
            entries = []
            p2 = sha1nodeconstants.nullid
            linknode = sha1nodeconstants.nullid
            for j in range(random.randint(1, 100)):
                node = self.getFakeHash()
                p1 = sha1nodeconstants.nullid
                if len(entries) > 0:
                    p1 = entries[random.randint(0, len(entries) - 1)]
                entries.append(node)
                revisions.append((filename, node, p1, p2, linknode, None))
                allentries[(filename, node)] = (p1, p2, linknode)
                if p1 == sha1nodeconstants.nullid:
                    ancestorcounts[(filename, node)] = 1
                else:
                    newcount = ancestorcounts[(filename, p1)] + 1
                    ancestorcounts[(filename, node)] = newcount

        # Must add file entries in reverse topological order
        revisions = list(reversed(revisions))
        pack = self.createPack(revisions)

        # Verify the pack contents
        for (filename, node) in allentries:
            ancestors = pack.getancestors(filename, node)
            self.assertEqual(ancestorcounts[(filename, node)], len(ancestors))
            for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
                ep1, ep2, elinknode = allentries[(filename, anode)]
                self.assertEqual(ap1, ep1)
                self.assertEqual(ap2, ep2)
                self.assertEqual(alinknode, elinknode)
                self.assertEqual(copyfrom, None)

    def testGetNodeInfo(self):
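        """Test that getnodeinfo() returns the parents, linknode, and copyfrom
        of each revision in an ancestor chain."""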
        revisions = []
        filename = b"foo"
        lastnode = sha1nodeconstants.nullid
        for i in range(10):
            node = self.getFakeHash()
            revisions.append(
                (
                    filename,
                    node,
                    lastnode,
                    sha1nodeconstants.nullid,
                    sha1nodeconstants.nullid,
                    None,
                )
            )
            lastnode = node

        pack = self.createPack(revisions)

        # Test that getnodeinfo returns the expected results
        for filename, node, p1, p2, linknode, copyfrom in revisions:
            ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
            self.assertEqual(ap1, p1)
            self.assertEqual(ap2, p2)
            self.assertEqual(alinknode, linknode)
            self.assertEqual(acopyfrom, copyfrom)

    def testGetMissing(self):
        """Test the getmissing() api."""
        revisions = []
        filename = b"foo"
        for i in range(10):
            node = self.getFakeHash()
            p1 = self.getFakeHash()
            p2 = self.getFakeHash()
            linknode = self.getFakeHash()
            revisions.append((filename, node, p1, p2, linknode, None))

        pack = self.createPack(revisions)

        missing = pack.getmissing([(filename, revisions[0][1])])
        self.assertFalse(missing)

        missing = pack.getmissing(
            [(filename, revisions[0][1]), (filename, revisions[1][1])]
        )
        self.assertFalse(missing)

        fakenode = self.getFakeHash()
        missing = pack.getmissing(
            [(filename, revisions[0][1]), (filename, fakenode)]
        )
        self.assertEqual(missing, [(filename, fakenode)])

        # Test getmissing on a non-existent filename
        missing = pack.getmissing([(b"bar", fakenode)])
        self.assertEqual(missing, [(b"bar", fakenode)])

    def testAddThrows(self):
        pack = self.createPack()
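
        # createPack() returns a pack that has already been closed, so any
        # further add() calls are expected to be rejected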
        with self.assertRaises(RuntimeError):
            pack.add(
                b'filename',
                sha1nodeconstants.nullid,
                sha1nodeconstants.nullid,
                sha1nodeconstants.nullid,
                sha1nodeconstants.nullid,
                None,
            )

    def testBadVersionThrows(self):
        pack = self.createPack()
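
        # Rewrite the first byte of the pack file (the version field) to an
        # unsupported value and check that reopening the pack fails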
        path = pack.path + b'.histpack'
        with open(path, 'rb') as f:
            raw = f.read()
        raw = struct.pack('!B', 255) + raw[1:]
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        with open(path, 'wb+') as f:
            f.write(raw)

        with self.assertRaises(RuntimeError):
            historypack.historypack(pack.path)

    def testLargePack(self):
        """Test creating and reading from a large pack with more than
        basepack.SMALLFANOUTCUTOFF entries. This causes it to use a 2^16
        fanout table instead."""
        total = basepack.SMALLFANOUTCUTOFF + 1
        revisions = []
        for i in pycompat.xrange(total):
            filename = b"foo-%d" % i
            node = self.getFakeHash()
            p1 = self.getFakeHash()
            p2 = self.getFakeHash()
            linknode = self.getFakeHash()
            revisions.append((filename, node, p1, p2, linknode, None))

        pack = self.createPack(revisions)
        self.assertEqual(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)

        for filename, node, p1, p2, linknode, copyfrom in revisions:
            actual = pack.getancestors(filename, node)[node]
            self.assertEqual(p1, actual[0])
            self.assertEqual(p2, actual[1])
            self.assertEqual(linknode, actual[2])
            self.assertEqual(copyfrom, actual[3])


# TODO:
# histpack store:
# - repack two packs into one

if __name__ == '__main__':
    if pycompat.iswindows:
        sys.exit(80)  # Skip on Windows
    silenttestrunner.main(__name__)