tests: cast division result to int...
Gregory Szorc
r41897:38a82e03 default draft
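
For context: under Python 2, `/` between two ints floor-divides and yields an int, while under Python 3 `/` is true division and always yields a float. A minimal illustrative sketch of the behavior this change works around (not part of the patch itself):

    numpacks = 10
    print(numpacks / 2)       # 5.0 on Python 3 (5 on Python 2)
    print(int(numpacks / 2))  # 5 on both; the spelling this commit adopts
    print(numpacks // 2)      # 5 on both; floor division also stays integral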
@@ -1,380 +1,380 @@
#!/usr/bin/env python
from __future__ import absolute_import, print_function

import hashlib
import os
import random
import shutil
import stat
import struct
import sys
import tempfile
import time
import unittest

import silenttestrunner

# Load the local remotefilelog, not the system one
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
from mercurial.node import nullid
from mercurial import (
    pycompat,
    ui as uimod,
)
from hgext.remotefilelog import (
    basepack,
    constants,
    datapack,
)

class datapacktestsbase(object):
    def __init__(self, datapackreader, paramsavailable):
        self.datapackreader = datapackreader
        self.paramsavailable = paramsavailable

    def setUp(self):
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        tempdir = pycompat.bytestr(tempfile.mkdtemp())
        self.tempdirs.append(tempdir)
        return tempdir

    def getHash(self, content):
        return hashlib.sha1(content).digest()

    def getFakeHash(self):
        return b''.join(pycompat.bytechr(random.randint(0, 255))
                        for _ in range(20))

    def createPack(self, revisions=None, packdir=None):
        if revisions is None:
            revisions = [(b"filename", self.getFakeHash(), nullid, b"content")]

        if packdir is None:
            packdir = self.makeTempDir()

        packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

        for args in revisions:
            filename, node, base, content = args[0:4]
            # meta is optional
            meta = None
            if len(args) > 4:
                meta = args[4]
            packer.add(filename, node, base, content, metadata=meta)

        path = packer.close()
        return self.datapackreader(path)

    def _testAddSingle(self, content):
        """Test putting a simple blob into a pack and reading it out.
        """
        filename = b"foo"
        node = self.getHash(content)

        revisions = [(filename, node, nullid, content)]
        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEqual(pack.params.fanoutprefix,
                             basepack.SMALLFANOUTPREFIX)

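        # Each getdeltachain() entry carries its delta text at index 4; with
        # a nullid base that delta is the full content.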
        chain = pack.getdeltachain(filename, node)
        self.assertEqual(content, chain[0][4])

    def testAddSingle(self):
        self._testAddSingle(b'abcdef')

    def testAddSingleEmpty(self):
        self._testAddSingle(b'')

    def testAddMultiple(self):
        """Test putting multiple unrelated blobs into a pack and reading them
        out.
        """
        revisions = []
        for i in range(10):
            filename = b"foo%d" % i
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, self.getFakeHash(), content))

        pack = self.createPack(revisions)

        for filename, node, base, content in revisions:
            entry = pack.getdelta(filename, node)
            self.assertEqual((content, filename, base, {}), entry)

            chain = pack.getdeltachain(filename, node)
            self.assertEqual(content, chain[0][4])

    def testAddDeltas(self):
        """Test putting multiple delta blobs into a pack and reading back the
        chain.
        """
        revisions = []
        filename = b"foo"
        lastnode = nullid
        for i in range(10):
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        entry = pack.getdelta(filename, revisions[0][1])
        realvalue = (revisions[0][3], filename, revisions[0][2], {})
        self.assertEqual(entry, realvalue)

        # Test that the chain for the final entry has all the others
        chain = pack.getdeltachain(filename, node)
        for i in range(10):
            content = b"abcdef%d" % i
            self.assertEqual(content, chain[-i - 1][4])

    def testPackMany(self):
        """Pack many related and unrelated objects.
        """
        # Build a random pack file
        revisions = []
        blobs = {}
        random.seed(0)
        for i in range(100):
            filename = b"filename-%d" % i
            filerevs = []
            for j in range(random.randint(1, 100)):
                content = b"content-%d" % j
                node = self.getHash(content)
                lastnode = nullid
                if len(filerevs) > 0:
                    lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
                filerevs.append(node)
                blobs[(filename, node, lastnode)] = content
                revisions.append((filename, node, lastnode, content))

        pack = self.createPack(revisions)

        # Verify the pack contents
        for (filename, node, lastnode), content in sorted(blobs.items()):
            chain = pack.getdeltachain(filename, node)
            for entry in chain:
                expectedcontent = blobs[(entry[0], entry[1], entry[3])]
                self.assertEqual(entry[4], expectedcontent)

    def testPackMetadata(self):
        revisions = []
        for i in range(100):
            filename = b'%d.txt' % i
            content = b'put-something-here \n' * i
            node = self.getHash(content)
            meta = {constants.METAKEYFLAG: i ** 4,
                    constants.METAKEYSIZE: len(content),
                    b'Z': b'random_string',
                    b'_': b'\0' * i}
            revisions.append((filename, node, nullid, content, meta))
        pack = self.createPack(revisions)
        for name, node, x, content, origmeta in revisions:
            parsedmeta = pack.getmeta(name, node)
            # flag == 0 should be optimized out
            if origmeta[constants.METAKEYFLAG] == 0:
                del origmeta[constants.METAKEYFLAG]
            self.assertEqual(parsedmeta, origmeta)

    def testGetMissing(self):
        """Test the getmissing() API.
        """
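        # getmissing() takes (filename, node) pairs and returns the subset
        # that is absent from the pack, as exercised below.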
        revisions = []
        filename = b"foo"
        lastnode = nullid
        for i in range(10):
            content = b"abcdef%d" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        missing = pack.getmissing([(b"foo", revisions[0][1])])
        self.assertFalse(missing)

        missing = pack.getmissing([(b"foo", revisions[0][1]),
                                   (b"foo", revisions[1][1])])
        self.assertFalse(missing)

        fakenode = self.getFakeHash()
        missing = pack.getmissing([(b"foo", revisions[0][1]),
                                   (b"foo", fakenode)])
        self.assertEqual(missing, [(b"foo", fakenode)])

    def testAddThrows(self):
        pack = self.createPack()

        try:
            pack.add(b'filename', nullid, b'contents')
            self.assertTrue(False, "datapack.add should throw")
        except RuntimeError:
            pass

    def testBadVersionThrows(self):
        pack = self.createPack()
        path = pack.path + b'.datapack'
        with open(path, 'rb') as f:
            raw = f.read()
        raw = struct.pack('!B', 255) + raw[1:]
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        with open(path, 'wb+') as f:
            f.write(raw)

        try:
            pack = self.datapackreader(pack.path)
            self.assertTrue(False, "bad version number should have thrown")
        except RuntimeError:
            pass

    def testMissingDeltabase(self):
        fakenode = self.getFakeHash()
        revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")]
        pack = self.createPack(revisions)
        chain = pack.getdeltachain(b"filename", fakenode)
        self.assertEqual(len(chain), 1)

    def testLargePack(self):
        """Test creating and reading from a large pack with over
        SMALLFANOUTCUTOFF entries. This causes it to use a 2^16 fanout table
        instead."""
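        # Past SMALLFANOUTCUTOFF entries the pack switches to the larger
        # 2^16 fanout table (LARGEFANOUTPREFIX), as asserted below.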
        revisions = []
        blobs = {}
        total = basepack.SMALLFANOUTCUTOFF + 1
        for i in pycompat.xrange(total):
            filename = b"filename-%d" % i
            content = filename
            node = self.getHash(content)
            blobs[(filename, node)] = content
            revisions.append((filename, node, nullid, content))

        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEqual(pack.params.fanoutprefix,
                             basepack.LARGEFANOUTPREFIX)

        for (filename, node), content in blobs.items():
            actualcontent = pack.getdeltachain(filename, node)[0][4]
            self.assertEqual(actualcontent, content)

    def testPacksCache(self):
        """Test that we remember the most recent packs while fetching the delta
        chain."""

        packdir = self.makeTempDir()
        deltachains = []

        numpacks = 10
        revisionsperpack = 100

        for i in range(numpacks):
            chain = []
            revision = (b'%d' % i, self.getFakeHash(), nullid, b"content")

            for _ in range(revisionsperpack):
                chain.append(revision)
                revision = (
                    b'%d' % i,
                    self.getFakeHash(),
                    revision[1],
                    self.getFakeHash()
                )

            self.createPack(chain, packdir)
            deltachains.append(chain)

        class testdatapackstore(datapack.datapackstore):
            # Ensures that we are not keeping everything in the cache.
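            # Python 3's / is true division and always returns a float, so
            # the result is cast back to int (the point of this change).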
-            DEFAULTCACHESIZE = numpacks / 2
+            DEFAULTCACHESIZE = int(numpacks / 2)

        store = testdatapackstore(uimod.ui(), packdir)

        random.shuffle(deltachains)
        for randomchain in deltachains:
            revision = random.choice(randomchain)
            chain = store.getdeltachain(revision[0], revision[1])

            mostrecentpack = next(iter(store.packs), None)
            self.assertEqual(
                mostrecentpack.getdeltachain(revision[0], revision[1]),
                chain
            )

            self.assertEqual(randomchain.index(revision) + 1, len(chain))

    # perf test off by default since it's slow
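    # (unittest only discovers methods named test*, so the leading
    # underscore keeps this out of normal runs; rename it to run it)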
    def _testIndexPerf(self):
        random.seed(0)
        print("Multi-get perf test")
        packsizes = [
            100,
            10000,
            100000,
            500000,
            1000000,
            3000000,
        ]
        lookupsizes = [
            10,
            100,
            1000,
            10000,
            100000,
            1000000,
        ]
        for packsize in packsizes:
            revisions = []
            for i in pycompat.xrange(packsize):
                filename = b"filename-%d" % i
                content = b"content-%d" % i
                node = self.getHash(content)
                revisions.append((filename, node, nullid, content))

            path = self.createPack(revisions).path

            # Perf of large multi-get
            import gc
            gc.disable()
            pack = self.datapackreader(path)
            for lookupsize in lookupsizes:
                if lookupsize > packsize:
                    continue
                random.shuffle(revisions)
                findnodes = [(rev[0], rev[1]) for rev in revisions]

                start = time.time()
                pack.getmissing(findnodes[:lookupsize])
                elapsed = time.time() - start
                print("%s pack %s lookups = %0.04f" %
                      (('%d' % packsize).rjust(7),
                       ('%d' % lookupsize).rjust(7),
                       elapsed))

            print("")
            gc.enable()

        # The perf test is meant to produce output, so we always fail the test
        # so the user sees the output.
        raise RuntimeError("perf test always fails")

class datapacktests(datapacktestsbase, unittest.TestCase):
    def __init__(self, *args, **kwargs):
        datapacktestsbase.__init__(self, datapack.datapack, True)
        unittest.TestCase.__init__(self, *args, **kwargs)

# TODO:
# datapack store:
#     - getmissing
#     - GC two packs into one

if __name__ == '__main__':
    if pycompat.iswindows:
        sys.exit(80) # Skip on Windows
    silenttestrunner.main(__name__)