tests: cast division result to int
Gregory Szorc
changeset r41897:38a82e03 (branch: default, phase: draft)
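
The one-line change below casts a division result to int because "/" performs true division on Python 3 and returns a float even for two integer operands. A minimal illustration of that difference (not part of the commit; the variable name simply mirrors the test):

    # Python 2: 10 / 2 == 5 (an int); Python 3: 10 / 2 == 5.0 (a float).
    numpacks = 10
    print(numpacks / 2)        # 5.0 on Python 3 (true division)
    print(int(numpacks / 2))   # 5 -- the cast this commit applies
    print(numpacks // 2)       # 5 -- floor division; equivalent here since
                               #      the operands are non-negative ints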
@@ -1,380 +1,380 @@
#!/usr/bin/env python
from __future__ import absolute_import, print_function

import hashlib
import os
import random
import shutil
import stat
import struct
import sys
import tempfile
import time
import unittest

import silenttestrunner

# Load the local remotefilelog, not the system one
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
from mercurial.node import nullid
from mercurial import (
    pycompat,
    ui as uimod,
)
from hgext.remotefilelog import (
    basepack,
    constants,
    datapack,
)

class datapacktestsbase(object):
    def __init__(self, datapackreader, paramsavailable):
        self.datapackreader = datapackreader
        self.paramsavailable = paramsavailable

    def setUp(self):
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        tempdir = pycompat.bytestr(tempfile.mkdtemp())
        self.tempdirs.append(tempdir)
        return tempdir

    def getHash(self, content):
        return hashlib.sha1(content).digest()

    def getFakeHash(self):
        return b''.join(pycompat.bytechr(random.randint(0, 255))
                        for _ in range(20))

    def createPack(self, revisions=None, packdir=None):
        if revisions is None:
            revisions = [(b"filename", self.getFakeHash(), nullid, b"content")]

        if packdir is None:
            packdir = self.makeTempDir()

        packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

        for args in revisions:
            filename, node, base, content = args[0:4]
            # meta is optional
            meta = None
            if len(args) > 4:
                meta = args[4]
            packer.add(filename, node, base, content, metadata=meta)

        path = packer.close()
        return self.datapackreader(path)

    def _testAddSingle(self, content):
        """Test putting a simple blob into a pack and reading it out.
        """
        filename = b"foo"
        node = self.getHash(content)

        revisions = [(filename, node, nullid, content)]
        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEqual(pack.params.fanoutprefix,
                             basepack.SMALLFANOUTPREFIX)

        chain = pack.getdeltachain(filename, node)
        self.assertEqual(content, chain[0][4])

    def testAddSingle(self):
        self._testAddSingle(b'abcdef')

    def testAddSingleEmpty(self):
        self._testAddSingle(b'')

    def testAddMultiple(self):
        """Test putting multiple unrelated blobs into a pack and reading them
        out.
        """
        revisions = []
        for i in range(10):
            filename = b"foo%d" % i
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, self.getFakeHash(), content))

        pack = self.createPack(revisions)

        for filename, node, base, content in revisions:
            entry = pack.getdelta(filename, node)
            self.assertEqual((content, filename, base, {}), entry)

            chain = pack.getdeltachain(filename, node)
            self.assertEqual(content, chain[0][4])

    def testAddDeltas(self):
        """Test putting multiple delta blobs into a pack and reading the
        chain back.
        """
        revisions = []
        filename = b"foo"
        lastnode = nullid
        for i in range(10):
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        entry = pack.getdelta(filename, revisions[0][1])
        realvalue = (revisions[0][3], filename, revisions[0][2], {})
        self.assertEqual(entry, realvalue)

        # Test that the chain for the final entry has all the others
        chain = pack.getdeltachain(filename, node)
        for i in range(10):
            content = b"abcdef%d" % i
            self.assertEqual(content, chain[-i - 1][4])

    def testPackMany(self):
        """Pack many related and unrelated objects.
        """
        # Build a random pack file
        revisions = []
        blobs = {}
        random.seed(0)
        for i in range(100):
            filename = b"filename-%d" % i
            filerevs = []
            for j in range(random.randint(1, 100)):
                content = b"content-%d" % j
                node = self.getHash(content)
                lastnode = nullid
                if len(filerevs) > 0:
                    lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
                filerevs.append(node)
                blobs[(filename, node, lastnode)] = content
                revisions.append((filename, node, lastnode, content))

        pack = self.createPack(revisions)

        # Verify the pack contents
        for (filename, node, lastnode), content in sorted(blobs.items()):
            chain = pack.getdeltachain(filename, node)
            for entry in chain:
                expectedcontent = blobs[(entry[0], entry[1], entry[3])]
                self.assertEqual(entry[4], expectedcontent)

    def testPackMetadata(self):
        revisions = []
        for i in range(100):
            filename = b'%d.txt' % i
            content = b'put-something-here \n' * i
            node = self.getHash(content)
            meta = {constants.METAKEYFLAG: i ** 4,
                    constants.METAKEYSIZE: len(content),
                    b'Z': b'random_string',
                    b'_': b'\0' * i}
            revisions.append((filename, node, nullid, content, meta))
        pack = self.createPack(revisions)
        for name, node, x, content, origmeta in revisions:
            parsedmeta = pack.getmeta(name, node)
            # flag == 0 should be optimized out
            if origmeta[constants.METAKEYFLAG] == 0:
                del origmeta[constants.METAKEYFLAG]
            self.assertEqual(parsedmeta, origmeta)

    def testGetMissing(self):
        """Test the getmissing() API.
        """
        revisions = []
        filename = b"foo"
        lastnode = nullid
        for i in range(10):
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        missing = pack.getmissing([(b"foo", revisions[0][1])])
        self.assertFalse(missing)

        missing = pack.getmissing([(b"foo", revisions[0][1]),
                                   (b"foo", revisions[1][1])])
        self.assertFalse(missing)

        fakenode = self.getFakeHash()
        missing = pack.getmissing([(b"foo", revisions[0][1]),
                                   (b"foo", fakenode)])
        self.assertEqual(missing, [(b"foo", fakenode)])

    def testAddThrows(self):
        pack = self.createPack()

        try:
            pack.add(b'filename', nullid, b'contents')
            self.fail("datapack.add should throw")
        except RuntimeError:
            pass

    def testBadVersionThrows(self):
        pack = self.createPack()
        path = pack.path + b'.datapack'
        with open(path, 'rb') as f:
            raw = f.read()
        raw = struct.pack('!B', 255) + raw[1:]
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        with open(path, 'wb+') as f:
            f.write(raw)

        try:
            pack = self.datapackreader(pack.path)
            self.fail("bad version number should have thrown")
        except RuntimeError:
            pass

    def testMissingDeltabase(self):
        fakenode = self.getFakeHash()
        revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")]
        pack = self.createPack(revisions)
        chain = pack.getdeltachain(b"filename", fakenode)
        self.assertEqual(len(chain), 1)

    def testLargePack(self):
        """Test creating and reading from a large pack with more than
        SMALLFANOUTCUTOFF entries, which causes it to use a 2^16 fanout table
        instead."""
        revisions = []
        blobs = {}
        total = basepack.SMALLFANOUTCUTOFF + 1
        for i in pycompat.xrange(total):
            filename = b"filename-%d" % i
            content = filename
            node = self.getHash(content)
            blobs[(filename, node)] = content
            revisions.append((filename, node, nullid, content))

        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEqual(pack.params.fanoutprefix,
                             basepack.LARGEFANOUTPREFIX)

        for (filename, node), content in blobs.items():
            actualcontent = pack.getdeltachain(filename, node)[0][4]
            self.assertEqual(actualcontent, content)

    def testPacksCache(self):
        """Test that we remember the most recent packs while fetching the delta
        chain."""

        packdir = self.makeTempDir()
        deltachains = []

        numpacks = 10
        revisionsperpack = 100

        for i in range(numpacks):
            chain = []
            revision = (b'%d' % i, self.getFakeHash(), nullid, b"content")

            for _ in range(revisionsperpack):
                chain.append(revision)
                revision = (
                    b'%d' % i,
                    self.getFakeHash(),
                    revision[1],
                    self.getFakeHash()
                )

            self.createPack(chain, packdir)
            deltachains.append(chain)

        class testdatapackstore(datapack.datapackstore):
            # Ensures that we are not keeping everything in the cache.
-           DEFAULTCACHESIZE = numpacks / 2
+           DEFAULTCACHESIZE = int(numpacks / 2)
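            # Editor's note: on Python 3 "/" is true division and returns a
            # float even for integer operands, so numpacks / 2 would be 5.0;
            # the int() cast (this commit's change) keeps DEFAULTCACHESIZE
            # integral. Floor division, numpacks // 2, would be an equivalent
            # spelling here.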

        store = testdatapackstore(uimod.ui(), packdir)

        random.shuffle(deltachains)
        for randomchain in deltachains:
            revision = random.choice(randomchain)
            chain = store.getdeltachain(revision[0], revision[1])

            mostrecentpack = next(iter(store.packs), None)
            self.assertEqual(
                mostrecentpack.getdeltachain(revision[0], revision[1]),
                chain
            )

            self.assertEqual(randomchain.index(revision) + 1, len(chain))

    # perf test off by default since it's slow
    def _testIndexPerf(self):
        random.seed(0)
        print("Multi-get perf test")
        packsizes = [
            100,
            10000,
            100000,
            500000,
            1000000,
            3000000,
        ]
        lookupsizes = [
            10,
            100,
            1000,
            10000,
            100000,
            1000000,
        ]
        for packsize in packsizes:
            revisions = []
            for i in pycompat.xrange(packsize):
                filename = b"filename-%d" % i
                content = b"content-%d" % i
                node = self.getHash(content)
                revisions.append((filename, node, nullid, content))

            path = self.createPack(revisions).path

            # Perf of large multi-get
            import gc
            gc.disable()
            pack = self.datapackreader(path)
            for lookupsize in lookupsizes:
                if lookupsize > packsize:
                    continue
                random.shuffle(revisions)
                findnodes = [(rev[0], rev[1]) for rev in revisions]

                start = time.time()
                pack.getmissing(findnodes[:lookupsize])
                elapsed = time.time() - start
                print("%s pack %s lookups = %0.04f" %
                      (('%d' % packsize).rjust(7),
                       ('%d' % lookupsize).rjust(7),
                       elapsed))

            print("")
            gc.enable()

        # The perf test is meant to produce output, so we always fail the test
        # so the user sees the output.
        raise RuntimeError("perf test always fails")

class datapacktests(datapacktestsbase, unittest.TestCase):
    def __init__(self, *args, **kwargs):
        datapacktestsbase.__init__(self, datapack.datapack, True)
        unittest.TestCase.__init__(self, *args, **kwargs)

# TODO:
# datapack store:
# - getmissing
# - GC two packs into one

if __name__ == '__main__':
    if pycompat.iswindows:
        sys.exit(80) # Skip on Windows
    silenttestrunner.main(__name__)
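
Note: exit status 80 is the value Mercurial's test harness (tests/run-tests.py) interprets as a skipped test, so the Windows guard above registers as a skip rather than a failure when run under the harness; run directly, the module executes under silenttestrunner.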