tests: fix up uses of xrange in remotefilelog tests for py3
Augie Fackler
r41290:2888d12b default
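The change applies the same fix in two styles, one per file below: the shell-embedded cacheprocess script gets a guarded alias so the py2 spelling of xrange keeps working on py3, while the datapack unit test switches bare xrange calls to Mercurial's pycompat.xrange shim. A minimal standalone sketch of the aliasing idiom (plain Python, nothing here is Mercurial-specific):

    import sys

    # On Python 3 the builtin is named range; alias it so py2-era code
    # that says xrange keeps working unchanged.
    if sys.version_info[0] > 2:
        xrange = range

    # Works on both interpreter majors now.
    assert list(xrange(3)) == [0, 1, 2]

For code that can import Mercurial, pycompat.xrange resolves the same way (xrange on py2, range on py3), which is what the second hunk uses.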
@@ -1,123 +1,125 @@
 #require no-windows

 $ . "$TESTDIR/remotefilelog-library.sh"

 $ hg init repo
 $ cd repo
 $ cat >> .hg/hgrc <<EOF
 > [remotefilelog]
 > server=True
 > EOF
 $ echo x > x
 $ echo y > y
 $ echo z > z
 $ hg commit -qAm xy
 $ cd ..

 $ cat > cacheprocess-logger.py <<EOF
 > import os
 > import shutil
 > import sys
+> if sys.version_info[0] > 2:
+>     xrange = range
 > f = open('$TESTTMP/cachelog.log', 'w')
 > srccache = os.path.join('$TESTTMP', 'oldhgcache')
 > def log(message):
 >     f.write(message)
 >     f.flush()
 > destcache = sys.argv[-1]
 > try:
 >     while True:
 >         cmd = sys.stdin.readline().strip()
 >         log('got command %r\n' % cmd)
 >         if cmd == 'exit':
 >             sys.exit(0)
 >         elif cmd == 'get':
 >             count = int(sys.stdin.readline())
 >             log('client wants %r blobs\n' % count)
 >             wants = []
 >             for _ in xrange(count):
 >                 key = sys.stdin.readline()[:-1]
 >                 wants.append(key)
 >                 if '\0' in key:
 >                     _, key = key.split('\0')
 >                 srcpath = os.path.join(srccache, key)
 >                 if os.path.exists(srcpath):
 >                     dest = os.path.join(destcache, key)
 >                     destdir = os.path.dirname(dest)
 >                     if not os.path.exists(destdir):
 >                         os.makedirs(destdir)
 >                     shutil.copyfile(srcpath, dest)
 >                 else:
 >                     # report a cache miss
 >                     sys.stdout.write(key + '\n')
 >             sys.stdout.write('0\n')
 >             for key in sorted(wants):
 >                 log('requested %r\n' % key)
 >             sys.stdout.flush()
 >         elif cmd == 'set':
 >             assert False, 'todo writing'
 >         else:
 >             assert False, 'unknown command! %r' % cmd
 > except Exception as e:
 >     log('Exception! %r\n' % e)
 >     raise
 > EOF

 $ cat >> $HGRCPATH <<EOF
 > [remotefilelog]
 > cacheprocess = python $TESTTMP/cacheprocess-logger.py
 > EOF

 Test cache keys and cache misses.
 $ hgcloneshallow ssh://user@dummy/repo clone -q
 3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over *s (glob)
 $ cat cachelog.log
 got command 'get'
 client wants 3 blobs
 requested 'master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
 requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
 requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
 got command 'set'
 Exception! AssertionError('todo writing',)

 Test cache hits.
 $ mv hgcache oldhgcache
 $ rm cachelog.log
 $ hgcloneshallow ssh://user@dummy/repo clone-cachehit -q
 3 files fetched over 1 fetches - (0 misses, 100.00% hit ratio) over *s (glob)
 $ cat cachelog.log | grep -v exit
 got command 'get'
 client wants 3 blobs
 requested 'master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
 requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
 requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'

 $ cat >> $HGRCPATH <<EOF
 > [remotefilelog]
 > cacheprocess.includepath = yes
 > EOF

 Test cache keys and cache misses with includepath.
 $ rm -r hgcache oldhgcache
 $ rm cachelog.log
 $ hgcloneshallow ssh://user@dummy/repo clone-withpath -q
 3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over *s (glob)
 $ cat cachelog.log
 got command 'get'
 client wants 3 blobs
 requested 'x\x00master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
 requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
 requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
 got command 'set'
 Exception! AssertionError('todo writing',)

 Test cache hits with includepath.
 $ mv hgcache oldhgcache
 $ rm cachelog.log
 $ hgcloneshallow ssh://user@dummy/repo clone-withpath-cachehit -q
 3 files fetched over 1 fetches - (0 misses, 100.00% hit ratio) over *s (glob)
 $ cat cachelog.log | grep -v exit
 got command 'get'
 client wants 3 blobs
 requested 'x\x00master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
 requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
 requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
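For orientation before the second hunk: the logger script above speaks the remotefilelog cacheprocess protocol on stdin/stdout, which is how the test observes the keys hg requests. A hypothetical driver for one 'get' round trip, purely illustrative (the script path, destination cache dir, and key are made-up placeholders, and the script itself only runs inside the test harness where $TESTTMP is substituted):

    from __future__ import print_function
    import subprocess

    # Stand-in for what hg does over the cache process's pipes.
    proc = subprocess.Popen(
        ['python', 'cacheprocess-logger.py', '/tmp/destcache'],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        universal_newlines=True)

    keys = ['master/11/aa/bb']  # placeholder cache key
    # Protocol: the command, then the key count, then one key per line.
    proc.stdin.write('get\n%d\n%s\n' % (len(keys), '\n'.join(keys)))
    proc.stdin.flush()

    # The script answers with one line per cache miss, then a '0' terminator;
    # hits are copied into the destination cache instead of being echoed.
    line = proc.stdout.readline().rstrip('\n')
    while line != '0':
        print('cache miss:', line)
        line = proc.stdout.readline().rstrip('\n')

    proc.stdin.write('exit\n')
    proc.stdin.flush()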
@@ -1,375 +1,376 @@
 #!/usr/bin/env python
 from __future__ import absolute_import, print_function

 import hashlib
 import os
 import random
 import shutil
 import stat
 import struct
 import sys
 import tempfile
 import time
 import unittest

 import silenttestrunner

 # Load the local remotefilelog, not the system one
 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
 from mercurial.node import nullid
 from mercurial import (
+    pycompat,
     ui as uimod,
 )
 from hgext.remotefilelog import (
     basepack,
     constants,
     datapack,
 )

 class datapacktestsbase(object):
     def __init__(self, datapackreader, paramsavailable):
         self.datapackreader = datapackreader
         self.paramsavailable = paramsavailable

     def setUp(self):
         self.tempdirs = []

     def tearDown(self):
         for d in self.tempdirs:
             shutil.rmtree(d)

     def makeTempDir(self):
         tempdir = tempfile.mkdtemp()
         self.tempdirs.append(tempdir)
         return tempdir

     def getHash(self, content):
         return hashlib.sha1(content).digest()

     def getFakeHash(self):
         return ''.join(chr(random.randint(0, 255)) for _ in range(20))

     def createPack(self, revisions=None, packdir=None):
         if revisions is None:
             revisions = [("filename", self.getFakeHash(), nullid, "content")]

         if packdir is None:
             packdir = self.makeTempDir()

         packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

         for args in revisions:
             filename, node, base, content = args[0:4]
             # meta is optional
             meta = None
             if len(args) > 4:
                 meta = args[4]
             packer.add(filename, node, base, content, metadata=meta)

         path = packer.close()
         return self.datapackreader(path)

     def _testAddSingle(self, content):
         """Test putting a simple blob into a pack and reading it out.
         """
         filename = "foo"
         node = self.getHash(content)

         revisions = [(filename, node, nullid, content)]
         pack = self.createPack(revisions)
         if self.paramsavailable:
             self.assertEquals(pack.params.fanoutprefix,
                               basepack.SMALLFANOUTPREFIX)

         chain = pack.getdeltachain(filename, node)
         self.assertEquals(content, chain[0][4])

     def testAddSingle(self):
         self._testAddSingle('')

     def testAddSingleEmpty(self):
         self._testAddSingle('abcdef')

     def testAddMultiple(self):
         """Test putting multiple unrelated blobs into a pack and reading them
         out.
         """
         revisions = []
         for i in range(10):
             filename = "foo%s" % i
             content = "abcdef%s" % i
             node = self.getHash(content)
             revisions.append((filename, node, self.getFakeHash(), content))

         pack = self.createPack(revisions)

         for filename, node, base, content in revisions:
             entry = pack.getdelta(filename, node)
             self.assertEquals((content, filename, base, {}), entry)

             chain = pack.getdeltachain(filename, node)
             self.assertEquals(content, chain[0][4])

     def testAddDeltas(self):
         """Test putting multiple delta blobs into a pack and read the chain.
         """
         revisions = []
         filename = "foo"
         lastnode = nullid
         for i in range(10):
             content = "abcdef%s" % i
             node = self.getHash(content)
             revisions.append((filename, node, lastnode, content))
             lastnode = node

         pack = self.createPack(revisions)

         entry = pack.getdelta(filename, revisions[0][1])
         realvalue = (revisions[0][3], filename, revisions[0][2], {})
         self.assertEquals(entry, realvalue)

         # Test that the chain for the final entry has all the others
         chain = pack.getdeltachain(filename, node)
         for i in range(10):
             content = "abcdef%s" % i
             self.assertEquals(content, chain[-i - 1][4])

     def testPackMany(self):
         """Pack many related and unrelated objects.
         """
         # Build a random pack file
         revisions = []
         blobs = {}
         random.seed(0)
         for i in range(100):
             filename = "filename-%s" % i
             filerevs = []
             for j in range(random.randint(1, 100)):
                 content = "content-%s" % j
                 node = self.getHash(content)
                 lastnode = nullid
                 if len(filerevs) > 0:
                     lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
                 filerevs.append(node)
                 blobs[(filename, node, lastnode)] = content
                 revisions.append((filename, node, lastnode, content))

         pack = self.createPack(revisions)

         # Verify the pack contents
         for (filename, node, lastnode), content in sorted(blobs.iteritems()):
             chain = pack.getdeltachain(filename, node)
             for entry in chain:
                 expectedcontent = blobs[(entry[0], entry[1], entry[3])]
                 self.assertEquals(entry[4], expectedcontent)

     def testPackMetadata(self):
         revisions = []
         for i in range(100):
             filename = '%s.txt' % i
             content = 'put-something-here \n' * i
             node = self.getHash(content)
             meta = {constants.METAKEYFLAG: i ** 4,
                     constants.METAKEYSIZE: len(content),
                     'Z': 'random_string',
                     '_': '\0' * i}
             revisions.append((filename, node, nullid, content, meta))
         pack = self.createPack(revisions)
         for name, node, x, content, origmeta in revisions:
             parsedmeta = pack.getmeta(name, node)
             # flag == 0 should be optimized out
             if origmeta[constants.METAKEYFLAG] == 0:
                 del origmeta[constants.METAKEYFLAG]
             self.assertEquals(parsedmeta, origmeta)

     def testGetMissing(self):
         """Test the getmissing() api.
         """
         revisions = []
         filename = "foo"
         lastnode = nullid
         for i in range(10):
             content = "abcdef%s" % i
             node = self.getHash(content)
             revisions.append((filename, node, lastnode, content))
             lastnode = node

         pack = self.createPack(revisions)

         missing = pack.getmissing([("foo", revisions[0][1])])
         self.assertFalse(missing)

         missing = pack.getmissing([("foo", revisions[0][1]),
                                    ("foo", revisions[1][1])])
         self.assertFalse(missing)

         fakenode = self.getFakeHash()
         missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)])
         self.assertEquals(missing, [("foo", fakenode)])

     def testAddThrows(self):
         pack = self.createPack()

         try:
             pack.add('filename', nullid, 'contents')
             self.assertTrue(False, "datapack.add should throw")
         except RuntimeError:
             pass

     def testBadVersionThrows(self):
         pack = self.createPack()
         path = pack.path + '.datapack'
         with open(path) as f:
             raw = f.read()
         raw = struct.pack('!B', 255) + raw[1:]
         os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
         with open(path, 'w+') as f:
             f.write(raw)

         try:
             pack = self.datapackreader(pack.path)
             self.assertTrue(False, "bad version number should have thrown")
         except RuntimeError:
             pass

     def testMissingDeltabase(self):
         fakenode = self.getFakeHash()
         revisions = [("filename", fakenode, self.getFakeHash(), "content")]
         pack = self.createPack(revisions)
         chain = pack.getdeltachain("filename", fakenode)
         self.assertEquals(len(chain), 1)

     def testLargePack(self):
         """Test creating and reading from a large pack with over X entries.
         This causes it to use a 2^16 fanout table instead."""
         revisions = []
         blobs = {}
         total = basepack.SMALLFANOUTCUTOFF + 1
-        for i in xrange(total):
+        for i in pycompat.xrange(total):
             filename = "filename-%s" % i
             content = filename
             node = self.getHash(content)
             blobs[(filename, node)] = content
             revisions.append((filename, node, nullid, content))

         pack = self.createPack(revisions)
         if self.paramsavailable:
             self.assertEquals(pack.params.fanoutprefix,
                               basepack.LARGEFANOUTPREFIX)

         for (filename, node), content in blobs.iteritems():
             actualcontent = pack.getdeltachain(filename, node)[0][4]
             self.assertEquals(actualcontent, content)

     def testPacksCache(self):
         """Test that we remember the most recent packs while fetching the delta
         chain."""

         packdir = self.makeTempDir()
         deltachains = []

         numpacks = 10
         revisionsperpack = 100

         for i in range(numpacks):
             chain = []
             revision = (str(i), self.getFakeHash(), nullid, "content")

             for _ in range(revisionsperpack):
                 chain.append(revision)
                 revision = (
                     str(i),
                     self.getFakeHash(),
                     revision[1],
                     self.getFakeHash()
                 )

             self.createPack(chain, packdir)
             deltachains.append(chain)

         class testdatapackstore(datapack.datapackstore):
             # Ensures that we are not keeping everything in the cache.
             DEFAULTCACHESIZE = numpacks / 2

         store = testdatapackstore(uimod.ui(), packdir)

         random.shuffle(deltachains)
         for randomchain in deltachains:
             revision = random.choice(randomchain)
             chain = store.getdeltachain(revision[0], revision[1])

             mostrecentpack = next(iter(store.packs), None)
             self.assertEquals(
                 mostrecentpack.getdeltachain(revision[0], revision[1]),
                 chain
             )

             self.assertEquals(randomchain.index(revision) + 1, len(chain))

     # perf test off by default since it's slow
     def _testIndexPerf(self):
         random.seed(0)
         print("Multi-get perf test")
         packsizes = [
             100,
             10000,
             100000,
             500000,
             1000000,
             3000000,
         ]
         lookupsizes = [
             10,
             100,
             1000,
             10000,
             100000,
             1000000,
         ]
         for packsize in packsizes:
             revisions = []
-            for i in xrange(packsize):
+            for i in pycompat.xrange(packsize):
                 filename = "filename-%s" % i
                 content = "content-%s" % i
                 node = self.getHash(content)
                 revisions.append((filename, node, nullid, content))

             path = self.createPack(revisions).path

             # Perf of large multi-get
             import gc
             gc.disable()
             pack = self.datapackreader(path)
             for lookupsize in lookupsizes:
                 if lookupsize > packsize:
                     continue
                 random.shuffle(revisions)
                 findnodes = [(rev[0], rev[1]) for rev in revisions]

                 start = time.time()
                 pack.getmissing(findnodes[:lookupsize])
                 elapsed = time.time() - start
                 print ("%s pack %s lookups = %0.04f" %
                        (('%s' % packsize).rjust(7),
                         ('%s' % lookupsize).rjust(7),
                         elapsed))

             print("")
             gc.enable()

         # The perf test is meant to produce output, so we always fail the test
         # so the user sees the output.
         raise RuntimeError("perf test always fails")

 class datapacktests(datapacktestsbase, unittest.TestCase):
     def __init__(self, *args, **kwargs):
         datapacktestsbase.__init__(self, datapack.datapack, True)
         unittest.TestCase.__init__(self, *args, **kwargs)

 # TODO:
 # datapack store:
 # - getmissing
 # - GC two packs into one

 if __name__ == '__main__':
     silenttestrunner.main(__name__)