##// END OF EJS Templates
tests: also skip remotefilelog *.py tests on Windows...
Matt Harbison -
r41304:878084a4 default
parent child Browse files
Show More
@@ -1,376 +1,378 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 from __future__ import absolute_import, print_function
2 from __future__ import absolute_import, print_function
3
3
4 import hashlib
4 import hashlib
5 import os
5 import os
6 import random
6 import random
7 import shutil
7 import shutil
8 import stat
8 import stat
9 import struct
9 import struct
10 import sys
10 import sys
11 import tempfile
11 import tempfile
12 import time
12 import time
13 import unittest
13 import unittest
14
14
15 import silenttestrunner
15 import silenttestrunner
16
16
17 # Load the local remotefilelog, not the system one
17 # Load the local remotefilelog, not the system one
18 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
18 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
19 from mercurial.node import nullid
19 from mercurial.node import nullid
20 from mercurial import (
20 from mercurial import (
21 pycompat,
21 pycompat,
22 ui as uimod,
22 ui as uimod,
23 )
23 )
24 from hgext.remotefilelog import (
24 from hgext.remotefilelog import (
25 basepack,
25 basepack,
26 constants,
26 constants,
27 datapack,
27 datapack,
28 )
28 )
29
29
class datapacktestsbase(object):
    """Shared tests for datapack implementations.

    A concrete subclass mixes this into a unittest.TestCase and supplies the
    reader class to exercise as ``datapackreader``; ``paramsavailable`` says
    whether that reader exposes a ``params`` attribute (fanout prefix etc.).
    """

    def __init__(self, datapackreader, paramsavailable):
        self.datapackreader = datapackreader
        self.paramsavailable = paramsavailable

    def setUp(self):
        # Scratch directories created by makeTempDir(), removed in tearDown().
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        """Create a temporary directory and schedule it for cleanup."""
        tempdir = tempfile.mkdtemp()
        self.tempdirs.append(tempdir)
        return tempdir

    def getHash(self, content):
        """Return the 20-byte sha1 digest of ``content``."""
        return hashlib.sha1(content).digest()

    def getFakeHash(self):
        """Return 20 random bytes standing in for a node hash."""
        return ''.join(chr(random.randint(0, 255)) for _ in range(20))

    def createPack(self, revisions=None, packdir=None):
        """Write ``revisions`` into a fresh pack and return a reader for it.

        Each revision is a ``(filename, node, deltabase, content)`` tuple,
        optionally followed by a metadata dict as a fifth element.
        """
        if revisions is None:
            revisions = [("filename", self.getFakeHash(), nullid, "content")]

        if packdir is None:
            packdir = self.makeTempDir()

        packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

        for args in revisions:
            filename, node, base, content = args[0:4]
            # meta is optional
            meta = None
            if len(args) > 4:
                meta = args[4]
            packer.add(filename, node, base, content, metadata=meta)

        path = packer.close()
        return self.datapackreader(path)

    def _testAddSingle(self, content):
        """Test putting a simple blob into a pack and reading it out.
        """
        filename = "foo"
        node = self.getHash(content)

        revisions = [(filename, node, nullid, content)]
        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEquals(pack.params.fanoutprefix,
                              basepack.SMALLFANOUTPREFIX)

        chain = pack.getdeltachain(filename, node)
        self.assertEquals(content, chain[0][4])

    def testAddSingle(self):
        # BUG FIX: the '' and 'abcdef' payloads were swapped between this
        # test and testAddSingleEmpty; the names now match the content.
        self._testAddSingle('abcdef')

    def testAddSingleEmpty(self):
        self._testAddSingle('')

    def testAddMultiple(self):
        """Test putting multiple unrelated blobs into a pack and reading them
        out.
        """
        revisions = []
        for i in range(10):
            filename = "foo%s" % i
            content = "abcdef%s" % i
            node = self.getHash(content)
            revisions.append((filename, node, self.getFakeHash(), content))

        pack = self.createPack(revisions)

        for filename, node, base, content in revisions:
            entry = pack.getdelta(filename, node)
            self.assertEquals((content, filename, base, {}), entry)

            chain = pack.getdeltachain(filename, node)
            self.assertEquals(content, chain[0][4])

    def testAddDeltas(self):
        """Test putting multiple delta blobs into a pack and read the chain.
        """
        revisions = []
        filename = "foo"
        lastnode = nullid
        for i in range(10):
            content = "abcdef%s" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        entry = pack.getdelta(filename, revisions[0][1])
        realvalue = (revisions[0][3], filename, revisions[0][2], {})
        self.assertEquals(entry, realvalue)

        # Test that the chain for the final entry has all the others
        chain = pack.getdeltachain(filename, node)
        for i in range(10):
            content = "abcdef%s" % i
            self.assertEquals(content, chain[-i - 1][4])

    def testPackMany(self):
        """Pack many related and unrelated objects.
        """
        # Build a random pack file
        revisions = []
        blobs = {}
        random.seed(0)
        for i in range(100):
            filename = "filename-%s" % i
            filerevs = []
            for j in range(random.randint(1, 100)):
                content = "content-%s" % j
                node = self.getHash(content)
                lastnode = nullid
                if len(filerevs) > 0:
                    lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
                filerevs.append(node)
                blobs[(filename, node, lastnode)] = content
                revisions.append((filename, node, lastnode, content))

        pack = self.createPack(revisions)

        # Verify the pack contents.  items() instead of the py2-only
        # iteritems() keeps this runnable on both Python 2 and 3.
        for (filename, node, lastnode), content in sorted(blobs.items()):
            chain = pack.getdeltachain(filename, node)
            for entry in chain:
                expectedcontent = blobs[(entry[0], entry[1], entry[3])]
                self.assertEquals(entry[4], expectedcontent)

    def testPackMetadata(self):
        revisions = []
        for i in range(100):
            filename = '%s.txt' % i
            content = 'put-something-here \n' * i
            node = self.getHash(content)
            meta = {constants.METAKEYFLAG: i ** 4,
                    constants.METAKEYSIZE: len(content),
                    'Z': 'random_string',
                    '_': '\0' * i}
            revisions.append((filename, node, nullid, content, meta))
        pack = self.createPack(revisions)
        for name, node, x, content, origmeta in revisions:
            parsedmeta = pack.getmeta(name, node)
            # flag == 0 should be optimized out
            if origmeta[constants.METAKEYFLAG] == 0:
                del origmeta[constants.METAKEYFLAG]
            self.assertEquals(parsedmeta, origmeta)

    def testGetMissing(self):
        """Test the getmissing() api.
        """
        revisions = []
        filename = "foo"
        lastnode = nullid
        for i in range(10):
            content = "abcdef%s" % i
            node = self.getHash(content)
            revisions.append((filename, node, lastnode, content))
            lastnode = node

        pack = self.createPack(revisions)

        missing = pack.getmissing([("foo", revisions[0][1])])
        self.assertFalse(missing)

        missing = pack.getmissing([("foo", revisions[0][1]),
                                   ("foo", revisions[1][1])])
        self.assertFalse(missing)

        fakenode = self.getFakeHash()
        missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)])
        self.assertEquals(missing, [("foo", fakenode)])

    def testAddThrows(self):
        pack = self.createPack()

        try:
            # add() is only valid on mutable packs; the reader must refuse it.
            pack.add('filename', nullid, 'contents')
            self.assertTrue(False, "datapack.add should throw")
        except RuntimeError:
            pass

    def testBadVersionThrows(self):
        pack = self.createPack()
        path = pack.path + '.datapack'
        # BUG FIX: the pack file is binary; read and rewrite it in binary
        # mode so newline translation (notably on Windows) cannot corrupt it.
        with open(path, 'rb') as f:
            raw = f.read()
        raw = struct.pack('!B', 255) + raw[1:]
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        with open(path, 'wb') as f:
            f.write(raw)

        try:
            pack = self.datapackreader(pack.path)
            self.assertTrue(False, "bad version number should have thrown")
        except RuntimeError:
            pass

    def testMissingDeltabase(self):
        fakenode = self.getFakeHash()
        revisions = [("filename", fakenode, self.getFakeHash(), "content")]
        pack = self.createPack(revisions)
        chain = pack.getdeltachain("filename", fakenode)
        self.assertEquals(len(chain), 1)

    def testLargePack(self):
        """Test creating and reading from a large pack with over X entries.
        This causes it to use a 2^16 fanout table instead."""
        revisions = []
        blobs = {}
        total = basepack.SMALLFANOUTCUTOFF + 1
        for i in pycompat.xrange(total):
            filename = "filename-%s" % i
            content = filename
            node = self.getHash(content)
            blobs[(filename, node)] = content
            revisions.append((filename, node, nullid, content))

        pack = self.createPack(revisions)
        if self.paramsavailable:
            self.assertEquals(pack.params.fanoutprefix,
                              basepack.LARGEFANOUTPREFIX)

        # items() instead of py2-only iteritems() for py3 compatibility
        for (filename, node), content in blobs.items():
            actualcontent = pack.getdeltachain(filename, node)[0][4]
            self.assertEquals(actualcontent, content)

    def testPacksCache(self):
        """Test that we remember the most recent packs while fetching the delta
        chain."""

        packdir = self.makeTempDir()
        deltachains = []

        numpacks = 10
        revisionsperpack = 100

        for i in range(numpacks):
            chain = []
            revision = (str(i), self.getFakeHash(), nullid, "content")

            for _ in range(revisionsperpack):
                chain.append(revision)
                revision = (
                    str(i),
                    self.getFakeHash(),
                    revision[1],
                    self.getFakeHash()
                )

            self.createPack(chain, packdir)
            deltachains.append(chain)

        class testdatapackstore(datapack.datapackstore):
            # Ensures that we are not keeping everything in the cache.
            # BUG FIX: floor division; plain '/' yields a float cache size
            # under Python 3.
            DEFAULTCACHESIZE = numpacks // 2

        store = testdatapackstore(uimod.ui(), packdir)

        random.shuffle(deltachains)
        for randomchain in deltachains:
            revision = random.choice(randomchain)
            chain = store.getdeltachain(revision[0], revision[1])

            mostrecentpack = next(iter(store.packs), None)
            self.assertEquals(
                mostrecentpack.getdeltachain(revision[0], revision[1]),
                chain
            )

            self.assertEquals(randomchain.index(revision) + 1, len(chain))

    # perf test off by default since it's slow
    def _testIndexPerf(self):
        random.seed(0)
        print("Multi-get perf test")
        packsizes = [
            100,
            10000,
            100000,
            500000,
            1000000,
            3000000,
        ]
        lookupsizes = [
            10,
            100,
            1000,
            10000,
            100000,
            1000000,
        ]
        for packsize in packsizes:
            revisions = []
            for i in pycompat.xrange(packsize):
                filename = "filename-%s" % i
                content = "content-%s" % i
                node = self.getHash(content)
                revisions.append((filename, node, nullid, content))

            path = self.createPack(revisions).path

            # Perf of large multi-get
            import gc
            gc.disable()
            pack = self.datapackreader(path)
            for lookupsize in lookupsizes:
                if lookupsize > packsize:
                    continue
                random.shuffle(revisions)
                findnodes = [(rev[0], rev[1]) for rev in revisions]

                start = time.time()
                pack.getmissing(findnodes[:lookupsize])
                elapsed = time.time() - start
                print ("%s pack %s lookups = %0.04f" %
                       (('%s' % packsize).rjust(7),
                        ('%s' % lookupsize).rjust(7),
                        elapsed))

            print("")
            gc.enable()

        # The perf test is meant to produce output, so we always fail the test
        # so the user sees the output.
        raise RuntimeError("perf test always fails")
class datapacktests(datapacktestsbase, unittest.TestCase):
    """Concrete TestCase running the shared datapack tests against the
    pure-Python reader, which does expose pack params."""

    def __init__(self, *args, **kwargs):
        datapacktestsbase.__init__(self, datapack.datapack, True)
        unittest.TestCase.__init__(self, *args, **kwargs)
369
369
370 # TODO:
370 # TODO:
371 # datapack store:
371 # datapack store:
372 # - getmissing
372 # - getmissing
373 # - GC two packs into one
373 # - GC two packs into one
374
374
if __name__ == '__main__':
    # remotefilelog pack files cannot be exercised on Windows; report the
    # test harness's "skipped" exit code instead of running and failing.
    if pycompat.iswindows:
        sys.exit(80)
    silenttestrunner.main(__name__)
@@ -1,276 +1,278 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 from __future__ import absolute_import
2 from __future__ import absolute_import
3
3
4 import hashlib
4 import hashlib
5 import os
5 import os
6 import random
6 import random
7 import shutil
7 import shutil
8 import stat
8 import stat
9 import struct
9 import struct
10 import sys
10 import sys
11 import tempfile
11 import tempfile
12 import unittest
12 import unittest
13
13
14 import silenttestrunner
14 import silenttestrunner
15
15
16 from mercurial.node import nullid
16 from mercurial.node import nullid
17 from mercurial import (
17 from mercurial import (
18 pycompat,
18 pycompat,
19 ui as uimod,
19 ui as uimod,
20 )
20 )
21 # Load the local remotefilelog, not the system one
21 # Load the local remotefilelog, not the system one
22 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
22 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
23 from hgext.remotefilelog import (
23 from hgext.remotefilelog import (
24 basepack,
24 basepack,
25 historypack,
25 historypack,
26 )
26 )
27
27
class histpacktests(unittest.TestCase):
    def setUp(self):
        # Scratch directories made by makeTempDir(); removed in tearDown().
        self.tempdirs = []
31
31
32 def tearDown(self):
32 def tearDown(self):
33 for d in self.tempdirs:
33 for d in self.tempdirs:
34 shutil.rmtree(d)
34 shutil.rmtree(d)
35
35
36 def makeTempDir(self):
36 def makeTempDir(self):
37 tempdir = tempfile.mkdtemp()
37 tempdir = tempfile.mkdtemp()
38 self.tempdirs.append(tempdir)
38 self.tempdirs.append(tempdir)
39 return pycompat.fsencode(tempdir)
39 return pycompat.fsencode(tempdir)
40
40
41 def getHash(self, content):
41 def getHash(self, content):
42 return hashlib.sha1(content).digest()
42 return hashlib.sha1(content).digest()
43
43
44 def getFakeHash(self):
44 def getFakeHash(self):
45 return b''.join(pycompat.bytechr(random.randint(0, 255))
45 return b''.join(pycompat.bytechr(random.randint(0, 255))
46 for _ in range(20))
46 for _ in range(20))
47
47
48 def createPack(self, revisions=None):
48 def createPack(self, revisions=None):
49 """Creates and returns a historypack containing the specified revisions.
49 """Creates and returns a historypack containing the specified revisions.
50
50
51 `revisions` is a list of tuples, where each tuple contains a filanem,
51 `revisions` is a list of tuples, where each tuple contains a filanem,
52 node, p1node, p2node, and linknode.
52 node, p1node, p2node, and linknode.
53 """
53 """
54 if revisions is None:
54 if revisions is None:
55 revisions = [("filename", self.getFakeHash(), nullid, nullid,
55 revisions = [("filename", self.getFakeHash(), nullid, nullid,
56 self.getFakeHash(), None)]
56 self.getFakeHash(), None)]
57
57
58 packdir = pycompat.fsencode(self.makeTempDir())
58 packdir = pycompat.fsencode(self.makeTempDir())
59 packer = historypack.mutablehistorypack(uimod.ui(), packdir,
59 packer = historypack.mutablehistorypack(uimod.ui(), packdir,
60 version=2)
60 version=2)
61
61
62 for filename, node, p1, p2, linknode, copyfrom in revisions:
62 for filename, node, p1, p2, linknode, copyfrom in revisions:
63 packer.add(filename, node, p1, p2, linknode, copyfrom)
63 packer.add(filename, node, p1, p2, linknode, copyfrom)
64
64
65 path = packer.close()
65 path = packer.close()
66 return historypack.historypack(path)
66 return historypack.historypack(path)
67
67
68 def testAddSingle(self):
68 def testAddSingle(self):
69 """Test putting a single entry into a pack and reading it out.
69 """Test putting a single entry into a pack and reading it out.
70 """
70 """
71 filename = "foo"
71 filename = "foo"
72 node = self.getFakeHash()
72 node = self.getFakeHash()
73 p1 = self.getFakeHash()
73 p1 = self.getFakeHash()
74 p2 = self.getFakeHash()
74 p2 = self.getFakeHash()
75 linknode = self.getFakeHash()
75 linknode = self.getFakeHash()
76
76
77 revisions = [(filename, node, p1, p2, linknode, None)]
77 revisions = [(filename, node, p1, p2, linknode, None)]
78 pack = self.createPack(revisions)
78 pack = self.createPack(revisions)
79
79
80 actual = pack.getancestors(filename, node)[node]
80 actual = pack.getancestors(filename, node)[node]
81 self.assertEquals(p1, actual[0])
81 self.assertEquals(p1, actual[0])
82 self.assertEquals(p2, actual[1])
82 self.assertEquals(p2, actual[1])
83 self.assertEquals(linknode, actual[2])
83 self.assertEquals(linknode, actual[2])
84
84
85 def testAddMultiple(self):
85 def testAddMultiple(self):
86 """Test putting multiple unrelated revisions into a pack and reading
86 """Test putting multiple unrelated revisions into a pack and reading
87 them out.
87 them out.
88 """
88 """
89 revisions = []
89 revisions = []
90 for i in range(10):
90 for i in range(10):
91 filename = "foo-%s" % i
91 filename = "foo-%s" % i
92 node = self.getFakeHash()
92 node = self.getFakeHash()
93 p1 = self.getFakeHash()
93 p1 = self.getFakeHash()
94 p2 = self.getFakeHash()
94 p2 = self.getFakeHash()
95 linknode = self.getFakeHash()
95 linknode = self.getFakeHash()
96 revisions.append((filename, node, p1, p2, linknode, None))
96 revisions.append((filename, node, p1, p2, linknode, None))
97
97
98 pack = self.createPack(revisions)
98 pack = self.createPack(revisions)
99
99
100 for filename, node, p1, p2, linknode, copyfrom in revisions:
100 for filename, node, p1, p2, linknode, copyfrom in revisions:
101 actual = pack.getancestors(filename, node)[node]
101 actual = pack.getancestors(filename, node)[node]
102 self.assertEquals(p1, actual[0])
102 self.assertEquals(p1, actual[0])
103 self.assertEquals(p2, actual[1])
103 self.assertEquals(p2, actual[1])
104 self.assertEquals(linknode, actual[2])
104 self.assertEquals(linknode, actual[2])
105 self.assertEquals(copyfrom, actual[3])
105 self.assertEquals(copyfrom, actual[3])
106
106
107 def testAddAncestorChain(self):
107 def testAddAncestorChain(self):
108 """Test putting multiple revisions in into a pack and read the ancestor
108 """Test putting multiple revisions in into a pack and read the ancestor
109 chain.
109 chain.
110 """
110 """
111 revisions = []
111 revisions = []
112 filename = b"foo"
112 filename = b"foo"
113 lastnode = nullid
113 lastnode = nullid
114 for i in range(10):
114 for i in range(10):
115 node = self.getFakeHash()
115 node = self.getFakeHash()
116 revisions.append((filename, node, lastnode, nullid, nullid, None))
116 revisions.append((filename, node, lastnode, nullid, nullid, None))
117 lastnode = node
117 lastnode = node
118
118
119 # revisions must be added in topological order, newest first
119 # revisions must be added in topological order, newest first
120 revisions = list(reversed(revisions))
120 revisions = list(reversed(revisions))
121 pack = self.createPack(revisions)
121 pack = self.createPack(revisions)
122
122
123 # Test that the chain has all the entries
123 # Test that the chain has all the entries
124 ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
124 ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
125 for filename, node, p1, p2, linknode, copyfrom in revisions:
125 for filename, node, p1, p2, linknode, copyfrom in revisions:
126 ap1, ap2, alinknode, acopyfrom = ancestors[node]
126 ap1, ap2, alinknode, acopyfrom = ancestors[node]
127 self.assertEquals(ap1, p1)
127 self.assertEquals(ap1, p1)
128 self.assertEquals(ap2, p2)
128 self.assertEquals(ap2, p2)
129 self.assertEquals(alinknode, linknode)
129 self.assertEquals(alinknode, linknode)
130 self.assertEquals(acopyfrom, copyfrom)
130 self.assertEquals(acopyfrom, copyfrom)
131
131
132 def testPackMany(self):
132 def testPackMany(self):
133 """Pack many related and unrelated ancestors.
133 """Pack many related and unrelated ancestors.
134 """
134 """
135 # Build a random pack file
135 # Build a random pack file
136 allentries = {}
136 allentries = {}
137 ancestorcounts = {}
137 ancestorcounts = {}
138 revisions = []
138 revisions = []
139 random.seed(0)
139 random.seed(0)
140 for i in range(100):
140 for i in range(100):
141 filename = b"filename-%d" % i
141 filename = b"filename-%d" % i
142 entries = []
142 entries = []
143 p2 = nullid
143 p2 = nullid
144 linknode = nullid
144 linknode = nullid
145 for j in range(random.randint(1, 100)):
145 for j in range(random.randint(1, 100)):
146 node = self.getFakeHash()
146 node = self.getFakeHash()
147 p1 = nullid
147 p1 = nullid
148 if len(entries) > 0:
148 if len(entries) > 0:
149 p1 = entries[random.randint(0, len(entries) - 1)]
149 p1 = entries[random.randint(0, len(entries) - 1)]
150 entries.append(node)
150 entries.append(node)
151 revisions.append((filename, node, p1, p2, linknode, None))
151 revisions.append((filename, node, p1, p2, linknode, None))
152 allentries[(filename, node)] = (p1, p2, linknode)
152 allentries[(filename, node)] = (p1, p2, linknode)
153 if p1 == nullid:
153 if p1 == nullid:
154 ancestorcounts[(filename, node)] = 1
154 ancestorcounts[(filename, node)] = 1
155 else:
155 else:
156 newcount = ancestorcounts[(filename, p1)] + 1
156 newcount = ancestorcounts[(filename, p1)] + 1
157 ancestorcounts[(filename, node)] = newcount
157 ancestorcounts[(filename, node)] = newcount
158
158
159 # Must add file entries in reverse topological order
159 # Must add file entries in reverse topological order
160 revisions = list(reversed(revisions))
160 revisions = list(reversed(revisions))
161 pack = self.createPack(revisions)
161 pack = self.createPack(revisions)
162
162
163 # Verify the pack contents
163 # Verify the pack contents
164 for (filename, node), (p1, p2, lastnode) in allentries.items():
164 for (filename, node), (p1, p2, lastnode) in allentries.items():
165 ancestors = pack.getancestors(filename, node)
165 ancestors = pack.getancestors(filename, node)
166 self.assertEquals(ancestorcounts[(filename, node)],
166 self.assertEquals(ancestorcounts[(filename, node)],
167 len(ancestors))
167 len(ancestors))
168 for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
168 for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
169 ep1, ep2, elinknode = allentries[(filename, anode)]
169 ep1, ep2, elinknode = allentries[(filename, anode)]
170 self.assertEquals(ap1, ep1)
170 self.assertEquals(ap1, ep1)
171 self.assertEquals(ap2, ep2)
171 self.assertEquals(ap2, ep2)
172 self.assertEquals(alinknode, elinknode)
172 self.assertEquals(alinknode, elinknode)
173 self.assertEquals(copyfrom, None)
173 self.assertEquals(copyfrom, None)
174
174
175 def testGetNodeInfo(self):
175 def testGetNodeInfo(self):
176 revisions = []
176 revisions = []
177 filename = b"foo"
177 filename = b"foo"
178 lastnode = nullid
178 lastnode = nullid
179 for i in range(10):
179 for i in range(10):
180 node = self.getFakeHash()
180 node = self.getFakeHash()
181 revisions.append((filename, node, lastnode, nullid, nullid, None))
181 revisions.append((filename, node, lastnode, nullid, nullid, None))
182 lastnode = node
182 lastnode = node
183
183
184 pack = self.createPack(revisions)
184 pack = self.createPack(revisions)
185
185
186 # Test that getnodeinfo returns the expected results
186 # Test that getnodeinfo returns the expected results
187 for filename, node, p1, p2, linknode, copyfrom in revisions:
187 for filename, node, p1, p2, linknode, copyfrom in revisions:
188 ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
188 ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
189 self.assertEquals(ap1, p1)
189 self.assertEquals(ap1, p1)
190 self.assertEquals(ap2, p2)
190 self.assertEquals(ap2, p2)
191 self.assertEquals(alinknode, linknode)
191 self.assertEquals(alinknode, linknode)
192 self.assertEquals(acopyfrom, copyfrom)
192 self.assertEquals(acopyfrom, copyfrom)
193
193
194 def testGetMissing(self):
194 def testGetMissing(self):
195 """Test the getmissing() api.
195 """Test the getmissing() api.
196 """
196 """
197 revisions = []
197 revisions = []
198 filename = b"foo"
198 filename = b"foo"
199 for i in range(10):
199 for i in range(10):
200 node = self.getFakeHash()
200 node = self.getFakeHash()
201 p1 = self.getFakeHash()
201 p1 = self.getFakeHash()
202 p2 = self.getFakeHash()
202 p2 = self.getFakeHash()
203 linknode = self.getFakeHash()
203 linknode = self.getFakeHash()
204 revisions.append((filename, node, p1, p2, linknode, None))
204 revisions.append((filename, node, p1, p2, linknode, None))
205
205
206 pack = self.createPack(revisions)
206 pack = self.createPack(revisions)
207
207
208 missing = pack.getmissing([(filename, revisions[0][1])])
208 missing = pack.getmissing([(filename, revisions[0][1])])
209 self.assertFalse(missing)
209 self.assertFalse(missing)
210
210
211 missing = pack.getmissing([(filename, revisions[0][1]),
211 missing = pack.getmissing([(filename, revisions[0][1]),
212 (filename, revisions[1][1])])
212 (filename, revisions[1][1])])
213 self.assertFalse(missing)
213 self.assertFalse(missing)
214
214
215 fakenode = self.getFakeHash()
215 fakenode = self.getFakeHash()
216 missing = pack.getmissing([(filename, revisions[0][1]),
216 missing = pack.getmissing([(filename, revisions[0][1]),
217 (filename, fakenode)])
217 (filename, fakenode)])
218 self.assertEquals(missing, [(filename, fakenode)])
218 self.assertEquals(missing, [(filename, fakenode)])
219
219
220 # Test getmissing on a non-existant filename
220 # Test getmissing on a non-existant filename
221 missing = pack.getmissing([("bar", fakenode)])
221 missing = pack.getmissing([("bar", fakenode)])
222 self.assertEquals(missing, [("bar", fakenode)])
222 self.assertEquals(missing, [("bar", fakenode)])
223
223
224 def testAddThrows(self):
224 def testAddThrows(self):
225 pack = self.createPack()
225 pack = self.createPack()
226
226
227 try:
227 try:
228 pack.add(b'filename', nullid, nullid, nullid, nullid, None)
228 pack.add(b'filename', nullid, nullid, nullid, nullid, None)
229 self.assertTrue(False, "historypack.add should throw")
229 self.assertTrue(False, "historypack.add should throw")
230 except RuntimeError:
230 except RuntimeError:
231 pass
231 pass
232
232
233 def testBadVersionThrows(self):
233 def testBadVersionThrows(self):
234 pack = self.createPack()
234 pack = self.createPack()
235 path = pack.path + '.histpack'
235 path = pack.path + '.histpack'
236 with open(path) as f:
236 with open(path) as f:
237 raw = f.read()
237 raw = f.read()
238 raw = struct.pack('!B', 255) + raw[1:]
238 raw = struct.pack('!B', 255) + raw[1:]
239 os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
239 os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
240 with open(path, 'w+') as f:
240 with open(path, 'w+') as f:
241 f.write(raw)
241 f.write(raw)
242
242
243 try:
243 try:
244 pack = historypack.historypack(pack.path)
244 pack = historypack.historypack(pack.path)
245 self.assertTrue(False, "bad version number should have thrown")
245 self.assertTrue(False, "bad version number should have thrown")
246 except RuntimeError:
246 except RuntimeError:
247 pass
247 pass
248
248
249 def testLargePack(self):
249 def testLargePack(self):
250 """Test creating and reading from a large pack with over X entries.
250 """Test creating and reading from a large pack with over X entries.
251 This causes it to use a 2^16 fanout table instead."""
251 This causes it to use a 2^16 fanout table instead."""
252 total = basepack.SMALLFANOUTCUTOFF + 1
252 total = basepack.SMALLFANOUTCUTOFF + 1
253 revisions = []
253 revisions = []
254 for i in pycompat.xrange(total):
254 for i in pycompat.xrange(total):
255 filename = b"foo-%d" % i
255 filename = b"foo-%d" % i
256 node = self.getFakeHash()
256 node = self.getFakeHash()
257 p1 = self.getFakeHash()
257 p1 = self.getFakeHash()
258 p2 = self.getFakeHash()
258 p2 = self.getFakeHash()
259 linknode = self.getFakeHash()
259 linknode = self.getFakeHash()
260 revisions.append((filename, node, p1, p2, linknode, None))
260 revisions.append((filename, node, p1, p2, linknode, None))
261
261
262 pack = self.createPack(revisions)
262 pack = self.createPack(revisions)
263 self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
263 self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
264
264
265 for filename, node, p1, p2, linknode, copyfrom in revisions:
265 for filename, node, p1, p2, linknode, copyfrom in revisions:
266 actual = pack.getancestors(filename, node)[node]
266 actual = pack.getancestors(filename, node)[node]
267 self.assertEquals(p1, actual[0])
267 self.assertEquals(p1, actual[0])
268 self.assertEquals(p2, actual[1])
268 self.assertEquals(p2, actual[1])
269 self.assertEquals(linknode, actual[2])
269 self.assertEquals(linknode, actual[2])
270 self.assertEquals(copyfrom, actual[3])
270 self.assertEquals(copyfrom, actual[3])
271 # TODO:
271 # TODO:
272 # histpack store:
272 # histpack store:
273 # - repack two packs into one
273 # - repack two packs into one
274
274
if __name__ == '__main__':
    # Exit code 80 tells the Mercurial test harness to skip this test;
    # remotefilelog is not supported on Windows.
    if pycompat.iswindows:
        sys.exit(80)
    silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now