tests: use bytes and %d formatting in test-remotefilelog-datapack.py...
Gregory Szorc
r41612:26832569 default
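Context for the diff below: the test previously built filenames and contents as str literals and interpolated integers with %s. On Python 3, hashlib.sha1() and the pack machinery operate on bytes, and %-formatting of bytes literals (PEP 461, Python 3.5+) accepts %d for integers while %s only accepts bytes-like objects. A minimal standalone sketch of that behavior (illustration only, not part of the commit):

    # Bytes %-formatting per PEP 461: %d works for ints on Python 3.5+
    # (and on Python 2, where bytes is str).
    assert b"foo%d" % 3 == b"foo3"
    # %s on bytes only accepts bytes-like arguments.
    assert b"foo%s" % b"bar" == b"foobar"

    try:
        b"foo%s" % 3   # works on Python 2, raises TypeError on Python 3
    except TypeError:
        pass           # int is not bytes-like and has no __bytes__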
@@ -1,378 +1,379 @@
 #!/usr/bin/env python
 from __future__ import absolute_import, print_function

 import hashlib
 import os
 import random
 import shutil
 import stat
 import struct
 import sys
 import tempfile
 import time
 import unittest

 import silenttestrunner

 # Load the local remotefilelog, not the system one
 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
 from mercurial.node import nullid
 from mercurial import (
     pycompat,
     ui as uimod,
 )
 from hgext.remotefilelog import (
     basepack,
     constants,
     datapack,
 )

 class datapacktestsbase(object):
     def __init__(self, datapackreader, paramsavailable):
         self.datapackreader = datapackreader
         self.paramsavailable = paramsavailable

     def setUp(self):
         self.tempdirs = []

     def tearDown(self):
         for d in self.tempdirs:
             shutil.rmtree(d)

     def makeTempDir(self):
         tempdir = tempfile.mkdtemp()
         self.tempdirs.append(tempdir)
         return tempdir

     def getHash(self, content):
         return hashlib.sha1(content).digest()

     def getFakeHash(self):
         return ''.join(chr(random.randint(0, 255)) for _ in range(20))

     def createPack(self, revisions=None, packdir=None):
         if revisions is None:
-            revisions = [("filename", self.getFakeHash(), nullid, "content")]
+            revisions = [(b"filename", self.getFakeHash(), nullid, b"content")]

         if packdir is None:
             packdir = self.makeTempDir()

         packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

         for args in revisions:
             filename, node, base, content = args[0:4]
             # meta is optional
             meta = None
             if len(args) > 4:
                 meta = args[4]
             packer.add(filename, node, base, content, metadata=meta)

         path = packer.close()
         return self.datapackreader(path)

     def _testAddSingle(self, content):
         """Test putting a simple blob into a pack and reading it out.
         """
-        filename = "foo"
+        filename = b"foo"
         node = self.getHash(content)

         revisions = [(filename, node, nullid, content)]
         pack = self.createPack(revisions)
         if self.paramsavailable:
             self.assertEqual(pack.params.fanoutprefix,
                              basepack.SMALLFANOUTPREFIX)

         chain = pack.getdeltachain(filename, node)
         self.assertEqual(content, chain[0][4])

     def testAddSingle(self):
-        self._testAddSingle('')
+        self._testAddSingle(b'')

     def testAddSingleEmpty(self):
-        self._testAddSingle('abcdef')
+        self._testAddSingle(b'abcdef')

     def testAddMultiple(self):
         """Test putting multiple unrelated blobs into a pack and reading them
         out.
         """
         revisions = []
         for i in range(10):
-            filename = "foo%s" % i
-            content = "abcdef%s" % i
+            filename = b"foo%d" % i
+            content = b"abcdef%d" % i
             node = self.getHash(content)
             revisions.append((filename, node, self.getFakeHash(), content))

         pack = self.createPack(revisions)

         for filename, node, base, content in revisions:
             entry = pack.getdelta(filename, node)
             self.assertEqual((content, filename, base, {}), entry)

             chain = pack.getdeltachain(filename, node)
             self.assertEqual(content, chain[0][4])

     def testAddDeltas(self):
         """Test putting multiple delta blobs into a pack and read the chain.
         """
         revisions = []
-        filename = "foo"
+        filename = b"foo"
         lastnode = nullid
         for i in range(10):
-            content = "abcdef%s" % i
+            content = b"abcdef%d" % i
             node = self.getHash(content)
             revisions.append((filename, node, lastnode, content))
             lastnode = node

         pack = self.createPack(revisions)

         entry = pack.getdelta(filename, revisions[0][1])
         realvalue = (revisions[0][3], filename, revisions[0][2], {})
         self.assertEqual(entry, realvalue)

         # Test that the chain for the final entry has all the others
         chain = pack.getdeltachain(filename, node)
         for i in range(10):
-            content = "abcdef%s" % i
+            content = b"abcdef%d" % i
             self.assertEqual(content, chain[-i - 1][4])

     def testPackMany(self):
         """Pack many related and unrelated objects.
         """
         # Build a random pack file
         revisions = []
         blobs = {}
         random.seed(0)
         for i in range(100):
-            filename = "filename-%s" % i
+            filename = b"filename-%d" % i
             filerevs = []
             for j in range(random.randint(1, 100)):
-                content = "content-%s" % j
+                content = b"content-%d" % j
                 node = self.getHash(content)
                 lastnode = nullid
                 if len(filerevs) > 0:
                     lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
                 filerevs.append(node)
                 blobs[(filename, node, lastnode)] = content
                 revisions.append((filename, node, lastnode, content))

         pack = self.createPack(revisions)

         # Verify the pack contents
         for (filename, node, lastnode), content in sorted(blobs.iteritems()):
             chain = pack.getdeltachain(filename, node)
             for entry in chain:
                 expectedcontent = blobs[(entry[0], entry[1], entry[3])]
                 self.assertEqual(entry[4], expectedcontent)

     def testPackMetadata(self):
         revisions = []
         for i in range(100):
-            filename = '%s.txt' % i
-            content = 'put-something-here \n' * i
+            filename = b'%d.txt' % i
+            content = b'put-something-here \n' * i
             node = self.getHash(content)
             meta = {constants.METAKEYFLAG: i ** 4,
                     constants.METAKEYSIZE: len(content),
-                    'Z': 'random_string',
-                    '_': '\0' * i}
+                    b'Z': b'random_string',
+                    b'_': b'\0' * i}
             revisions.append((filename, node, nullid, content, meta))
         pack = self.createPack(revisions)
         for name, node, x, content, origmeta in revisions:
             parsedmeta = pack.getmeta(name, node)
             # flag == 0 should be optimized out
             if origmeta[constants.METAKEYFLAG] == 0:
                 del origmeta[constants.METAKEYFLAG]
             self.assertEqual(parsedmeta, origmeta)

     def testGetMissing(self):
         """Test the getmissing() api.
         """
         revisions = []
-        filename = "foo"
+        filename = b"foo"
         lastnode = nullid
         for i in range(10):
-            content = "abcdef%s" % i
+            content = b"abcdef%d" % i
             node = self.getHash(content)
             revisions.append((filename, node, lastnode, content))
             lastnode = node

         pack = self.createPack(revisions)

-        missing = pack.getmissing([("foo", revisions[0][1])])
+        missing = pack.getmissing([(b"foo", revisions[0][1])])
         self.assertFalse(missing)

-        missing = pack.getmissing([("foo", revisions[0][1]),
-                                   ("foo", revisions[1][1])])
+        missing = pack.getmissing([(b"foo", revisions[0][1]),
+                                   (b"foo", revisions[1][1])])
         self.assertFalse(missing)

         fakenode = self.getFakeHash()
-        missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)])
-        self.assertEqual(missing, [("foo", fakenode)])
+        missing = pack.getmissing([(b"foo", revisions[0][1]),
+                                   (b"foo", fakenode)])
+        self.assertEqual(missing, [(b"foo", fakenode)])

     def testAddThrows(self):
         pack = self.createPack()

         try:
-            pack.add('filename', nullid, 'contents')
+            pack.add(b'filename', nullid, b'contents')
             self.assertTrue(False, "datapack.add should throw")
         except RuntimeError:
             pass

     def testBadVersionThrows(self):
         pack = self.createPack()
-        path = pack.path + '.datapack'
+        path = pack.path + b'.datapack'
         with open(path) as f:
             raw = f.read()
         raw = struct.pack('!B', 255) + raw[1:]
         os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
         with open(path, 'w+') as f:
             f.write(raw)

         try:
             pack = self.datapackreader(pack.path)
             self.assertTrue(False, "bad version number should have thrown")
         except RuntimeError:
             pass

     def testMissingDeltabase(self):
         fakenode = self.getFakeHash()
-        revisions = [("filename", fakenode, self.getFakeHash(), "content")]
+        revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")]
         pack = self.createPack(revisions)
-        chain = pack.getdeltachain("filename", fakenode)
+        chain = pack.getdeltachain(b"filename", fakenode)
         self.assertEqual(len(chain), 1)

     def testLargePack(self):
         """Test creating and reading from a large pack with over X entries.
         This causes it to use a 2^16 fanout table instead."""
         revisions = []
         blobs = {}
         total = basepack.SMALLFANOUTCUTOFF + 1
         for i in pycompat.xrange(total):
-            filename = "filename-%s" % i
+            filename = b"filename-%d" % i
             content = filename
             node = self.getHash(content)
             blobs[(filename, node)] = content
             revisions.append((filename, node, nullid, content))

         pack = self.createPack(revisions)
         if self.paramsavailable:
             self.assertEqual(pack.params.fanoutprefix,
                              basepack.LARGEFANOUTPREFIX)

         for (filename, node), content in blobs.iteritems():
             actualcontent = pack.getdeltachain(filename, node)[0][4]
             self.assertEqual(actualcontent, content)

     def testPacksCache(self):
         """Test that we remember the most recent packs while fetching the delta
         chain."""

         packdir = self.makeTempDir()
         deltachains = []

         numpacks = 10
         revisionsperpack = 100

         for i in range(numpacks):
             chain = []
-            revision = (str(i), self.getFakeHash(), nullid, "content")
+            revision = (b'%d' % i, self.getFakeHash(), nullid, b"content")

             for _ in range(revisionsperpack):
                 chain.append(revision)
                 revision = (
-                    str(i),
+                    b'%d' % i,
                     self.getFakeHash(),
                     revision[1],
                     self.getFakeHash()
                 )

             self.createPack(chain, packdir)
             deltachains.append(chain)

         class testdatapackstore(datapack.datapackstore):
             # Ensures that we are not keeping everything in the cache.
             DEFAULTCACHESIZE = numpacks / 2

         store = testdatapackstore(uimod.ui(), packdir)

         random.shuffle(deltachains)
         for randomchain in deltachains:
             revision = random.choice(randomchain)
             chain = store.getdeltachain(revision[0], revision[1])

             mostrecentpack = next(iter(store.packs), None)
             self.assertEqual(
                 mostrecentpack.getdeltachain(revision[0], revision[1]),
                 chain
             )

             self.assertEqual(randomchain.index(revision) + 1, len(chain))

     # perf test off by default since it's slow
     def _testIndexPerf(self):
         random.seed(0)
         print("Multi-get perf test")
         packsizes = [
             100,
             10000,
             100000,
             500000,
             1000000,
             3000000,
         ]
         lookupsizes = [
             10,
             100,
             1000,
             10000,
             100000,
             1000000,
         ]
         for packsize in packsizes:
             revisions = []
             for i in pycompat.xrange(packsize):
-                filename = "filename-%s" % i
-                content = "content-%s" % i
+                filename = b"filename-%d" % i
+                content = b"content-%d" % i
                 node = self.getHash(content)
                 revisions.append((filename, node, nullid, content))

             path = self.createPack(revisions).path

             # Perf of large multi-get
             import gc
             gc.disable()
             pack = self.datapackreader(path)
             for lookupsize in lookupsizes:
                 if lookupsize > packsize:
                     continue
                 random.shuffle(revisions)
                 findnodes = [(rev[0], rev[1]) for rev in revisions]

                 start = time.time()
                 pack.getmissing(findnodes[:lookupsize])
                 elapsed = time.time() - start
-                print ("%s pack %s lookups = %0.04f" %
-                       (('%s' % packsize).rjust(7),
-                        ('%s' % lookupsize).rjust(7),
+                print ("%s pack %d lookups = %0.04f" %
+                       (('%d' % packsize).rjust(7),
+                        ('%d' % lookupsize).rjust(7),
                         elapsed))

             print("")
             gc.enable()

         # The perf test is meant to produce output, so we always fail the test
         # so the user sees the output.
         raise RuntimeError("perf test always fails")

 class datapacktests(datapacktestsbase, unittest.TestCase):
     def __init__(self, *args, **kwargs):
         datapacktestsbase.__init__(self, datapack.datapack, True)
         unittest.TestCase.__init__(self, *args, **kwargs)

 # TODO:
 # datapack store:
 # - getmissing
 # - GC two packs into one

 if __name__ == '__main__':
     if pycompat.iswindows:
         sys.exit(80) # Skip on Windows
     silenttestrunner.main(__name__)
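For orientation, here is the write/read round-trip these tests exercise, reduced to the calls visible in the diff above. This is a sketch, not an official example: it assumes hgext.remotefilelog is importable, and the temporary-directory handling is simplified (the tests clean up via tearDown).

    import hashlib
    import tempfile

    from mercurial.node import nullid
    from mercurial import ui as uimod
    from hgext.remotefilelog import datapack

    packdir = tempfile.mkdtemp()            # simplified lifetime management
    content = b"content"
    node = hashlib.sha1(content).digest()   # entries are keyed by sha1 of content

    # Write side: stage (filename, node, deltabase, delta) records, then close.
    packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)
    packer.add(b"filename", node, nullid, content)
    path = packer.close()                   # returns the finished pack's path

    # Read side: the last field of a delta-chain entry is the stored text.
    pack = datapack.datapack(path)
    assert pack.getdeltachain(b"filename", node)[0][4] == content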