tests: also skip remotefilelog *.py tests on Windows...
Matt Harbison
r41304:878084a4 default
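Both test files gain the same guard ahead of silenttestrunner.main(). Mercurial's run-tests.py harness treats exit status 80 as "skipped" rather than "failed", so bailing out early with that status makes the suite report a skip on Windows instead of a failure. The added guard, shown in isolation:

import sys

import silenttestrunner
from mercurial import pycompat

if __name__ == '__main__':
    if pycompat.iswindows:
        # run-tests.py interprets exit status 80 as "test skipped"
        sys.exit(80)
    silenttestrunner.main(__name__)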
@@ -1,376 +1,378 b''
1 1 #!/usr/bin/env python
2 2 from __future__ import absolute_import, print_function
3 3
4 4 import hashlib
5 5 import os
6 6 import random
7 7 import shutil
8 8 import stat
9 9 import struct
10 10 import sys
11 11 import tempfile
12 12 import time
13 13 import unittest
14 14
15 15 import silenttestrunner
16 16
17 17 # Load the local remotefilelog, not the system one
18 18 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
19 19 from mercurial.node import nullid
20 20 from mercurial import (
21 21 pycompat,
22 22 ui as uimod,
23 23 )
24 24 from hgext.remotefilelog import (
25 25 basepack,
26 26 constants,
27 27 datapack,
28 28 )
29 29
30 30 class datapacktestsbase(object):
31 31 def __init__(self, datapackreader, paramsavailable):
32 32 self.datapackreader = datapackreader
33 33 self.paramsavailable = paramsavailable
34 34
35 35 def setUp(self):
36 36 self.tempdirs = []
37 37
38 38 def tearDown(self):
39 39 for d in self.tempdirs:
40 40 shutil.rmtree(d)
41 41
42 42 def makeTempDir(self):
43 43 tempdir = tempfile.mkdtemp()
44 44 self.tempdirs.append(tempdir)
45 45 return tempdir
46 46
47 47 def getHash(self, content):
48 48 return hashlib.sha1(content).digest()
49 49
50 50 def getFakeHash(self):
51 51 return ''.join(chr(random.randint(0, 255)) for _ in range(20))
52 52
53 53 def createPack(self, revisions=None, packdir=None):
54 54 if revisions is None:
55 55 revisions = [("filename", self.getFakeHash(), nullid, "content")]
56 56
57 57 if packdir is None:
58 58 packdir = self.makeTempDir()
59 59
60 60 packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)
61 61
62 62 for args in revisions:
63 63 filename, node, base, content = args[0:4]
64 64 # meta is optional
65 65 meta = None
66 66 if len(args) > 4:
67 67 meta = args[4]
68 68 packer.add(filename, node, base, content, metadata=meta)
69 69
70 70 path = packer.close()
71 71 return self.datapackreader(path)
72 72
73 73 def _testAddSingle(self, content):
74 74 """Test putting a simple blob into a pack and reading it out.
75 75 """
76 76 filename = "foo"
77 77 node = self.getHash(content)
78 78
79 79 revisions = [(filename, node, nullid, content)]
80 80 pack = self.createPack(revisions)
81 81 if self.paramsavailable:
82 82 self.assertEquals(pack.params.fanoutprefix,
83 83 basepack.SMALLFANOUTPREFIX)
84 84
85 85 chain = pack.getdeltachain(filename, node)
86 86 self.assertEquals(content, chain[0][4])
87 87
88 88 def testAddSingle(self):
89 89 self._testAddSingle('abcdef')
90 90
91 91 def testAddSingleEmpty(self):
92 92 self._testAddSingle('')
93 93
94 94 def testAddMultiple(self):
95 95 """Test putting multiple unrelated blobs into a pack and reading them
96 96 out.
97 97 """
98 98 revisions = []
99 99 for i in range(10):
100 100 filename = "foo%s" % i
101 101 content = "abcdef%s" % i
102 102 node = self.getHash(content)
103 103 revisions.append((filename, node, self.getFakeHash(), content))
104 104
105 105 pack = self.createPack(revisions)
106 106
107 107 for filename, node, base, content in revisions:
108 108 entry = pack.getdelta(filename, node)
109 109 self.assertEquals((content, filename, base, {}), entry)
110 110
111 111 chain = pack.getdeltachain(filename, node)
112 112 self.assertEquals(content, chain[0][4])
113 113
114 114 def testAddDeltas(self):
115 115 """Test putting multiple delta blobs into a pack and read the chain.
116 116 """
117 117 revisions = []
118 118 filename = "foo"
119 119 lastnode = nullid
120 120 for i in range(10):
121 121 content = "abcdef%s" % i
122 122 node = self.getHash(content)
123 123 revisions.append((filename, node, lastnode, content))
124 124 lastnode = node
125 125
126 126 pack = self.createPack(revisions)
127 127
128 128 entry = pack.getdelta(filename, revisions[0][1])
129 129 realvalue = (revisions[0][3], filename, revisions[0][2], {})
130 130 self.assertEquals(entry, realvalue)
131 131
132 132 # Test that the chain for the final entry has all the others
133 133 chain = pack.getdeltachain(filename, node)
134 134 for i in range(10):
135 135 content = "abcdef%s" % i
136 136 self.assertEquals(content, chain[-i - 1][4])
137 137
138 138 def testPackMany(self):
139 139 """Pack many related and unrelated objects.
140 140 """
141 141 # Build a random pack file
142 142 revisions = []
143 143 blobs = {}
144 144 random.seed(0)
145 145 for i in range(100):
146 146 filename = "filename-%s" % i
147 147 filerevs = []
148 148 for j in range(random.randint(1, 100)):
149 149 content = "content-%s" % j
150 150 node = self.getHash(content)
151 151 lastnode = nullid
152 152 if len(filerevs) > 0:
153 153 lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
154 154 filerevs.append(node)
155 155 blobs[(filename, node, lastnode)] = content
156 156 revisions.append((filename, node, lastnode, content))
157 157
158 158 pack = self.createPack(revisions)
159 159
160 160 # Verify the pack contents
161 161 for (filename, node, lastnode), content in sorted(blobs.iteritems()):
162 162 chain = pack.getdeltachain(filename, node)
163 163 for entry in chain:
164 164 expectedcontent = blobs[(entry[0], entry[1], entry[3])]
165 165 self.assertEquals(entry[4], expectedcontent)
166 166
167 167 def testPackMetadata(self):
168 168 revisions = []
169 169 for i in range(100):
170 170 filename = '%s.txt' % i
171 171 content = 'put-something-here \n' * i
172 172 node = self.getHash(content)
173 173 meta = {constants.METAKEYFLAG: i ** 4,
174 174 constants.METAKEYSIZE: len(content),
175 175 'Z': 'random_string',
176 176 '_': '\0' * i}
177 177 revisions.append((filename, node, nullid, content, meta))
178 178 pack = self.createPack(revisions)
179 179 for name, node, x, content, origmeta in revisions:
180 180 parsedmeta = pack.getmeta(name, node)
181 181 # flag == 0 should be optimized out
182 182 if origmeta[constants.METAKEYFLAG] == 0:
183 183 del origmeta[constants.METAKEYFLAG]
184 184 self.assertEquals(parsedmeta, origmeta)
185 185
186 186 def testGetMissing(self):
187 187 """Test the getmissing() api.
188 188 """
189 189 revisions = []
190 190 filename = "foo"
191 191 lastnode = nullid
192 192 for i in range(10):
193 193 content = "abcdef%s" % i
194 194 node = self.getHash(content)
195 195 revisions.append((filename, node, lastnode, content))
196 196 lastnode = node
197 197
198 198 pack = self.createPack(revisions)
199 199
200 200 missing = pack.getmissing([("foo", revisions[0][1])])
201 201 self.assertFalse(missing)
202 202
203 203 missing = pack.getmissing([("foo", revisions[0][1]),
204 204 ("foo", revisions[1][1])])
205 205 self.assertFalse(missing)
206 206
207 207 fakenode = self.getFakeHash()
208 208 missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)])
209 209 self.assertEquals(missing, [("foo", fakenode)])
210 210
211 211 def testAddThrows(self):
212 212 pack = self.createPack()
213 213
214 214 try:
215 215 pack.add('filename', nullid, 'contents')
216 216 self.assertTrue(False, "datapack.add should throw")
217 217 except RuntimeError:
218 218 pass
219 219
220 220 def testBadVersionThrows(self):
221 221 pack = self.createPack()
222 222 path = pack.path + '.datapack'
223 223 with open(path, 'rb') as f:
224 224 raw = f.read()
225 225 raw = struct.pack('!B', 255) + raw[1:]
226 226 os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
227 227 with open(path, 'w+b') as f:
228 228 f.write(raw)
229 229
230 230 try:
231 231 pack = self.datapackreader(pack.path)
232 232 self.assertTrue(False, "bad version number should have thrown")
233 233 except RuntimeError:
234 234 pass
235 235
236 236 def testMissingDeltabase(self):
237 237 fakenode = self.getFakeHash()
238 238 revisions = [("filename", fakenode, self.getFakeHash(), "content")]
239 239 pack = self.createPack(revisions)
240 240 chain = pack.getdeltachain("filename", fakenode)
241 241 self.assertEquals(len(chain), 1)
242 242
243 243 def testLargePack(self):
244 244 """Test creating and reading from a large pack with over X entries.
245 245 This causes it to use a 2^16 fanout table instead."""
246 246 revisions = []
247 247 blobs = {}
248 248 total = basepack.SMALLFANOUTCUTOFF + 1
249 249 for i in pycompat.xrange(total):
250 250 filename = "filename-%s" % i
251 251 content = filename
252 252 node = self.getHash(content)
253 253 blobs[(filename, node)] = content
254 254 revisions.append((filename, node, nullid, content))
255 255
256 256 pack = self.createPack(revisions)
257 257 if self.paramsavailable:
258 258 self.assertEquals(pack.params.fanoutprefix,
259 259 basepack.LARGEFANOUTPREFIX)
260 260
261 261 for (filename, node), content in blobs.iteritems():
262 262 actualcontent = pack.getdeltachain(filename, node)[0][4]
263 263 self.assertEquals(actualcontent, content)
264 264
265 265 def testPacksCache(self):
266 266 """Test that we remember the most recent packs while fetching the delta
267 267 chain."""
268 268
269 269 packdir = self.makeTempDir()
270 270 deltachains = []
271 271
272 272 numpacks = 10
273 273 revisionsperpack = 100
274 274
275 275 for i in range(numpacks):
276 276 chain = []
277 277 revision = (str(i), self.getFakeHash(), nullid, "content")
278 278
279 279 for _ in range(revisionsperpack):
280 280 chain.append(revision)
281 281 revision = (
282 282 str(i),
283 283 self.getFakeHash(),
284 284 revision[1],
285 285 self.getFakeHash()
286 286 )
287 287
288 288 self.createPack(chain, packdir)
289 289 deltachains.append(chain)
290 290
291 291 class testdatapackstore(datapack.datapackstore):
292 292 # Ensures that we are not keeping everything in the cache.
293 293 DEFAULTCACHESIZE = numpacks // 2  # floor division keeps this an int on py3
294 294
295 295 store = testdatapackstore(uimod.ui(), packdir)
296 296
297 297 random.shuffle(deltachains)
298 298 for randomchain in deltachains:
299 299 revision = random.choice(randomchain)
300 300 chain = store.getdeltachain(revision[0], revision[1])
301 301
302 302 mostrecentpack = next(iter(store.packs), None)
303 303 self.assertEquals(
304 304 mostrecentpack.getdeltachain(revision[0], revision[1]),
305 305 chain
306 306 )
307 307
308 308 self.assertEquals(randomchain.index(revision) + 1, len(chain))
309 309
310 310 # perf test off by default since it's slow
311 311 def _testIndexPerf(self):
312 312 random.seed(0)
313 313 print("Multi-get perf test")
314 314 packsizes = [
315 315 100,
316 316 10000,
317 317 100000,
318 318 500000,
319 319 1000000,
320 320 3000000,
321 321 ]
322 322 lookupsizes = [
323 323 10,
324 324 100,
325 325 1000,
326 326 10000,
327 327 100000,
328 328 1000000,
329 329 ]
330 330 for packsize in packsizes:
331 331 revisions = []
332 332 for i in pycompat.xrange(packsize):
333 333 filename = "filename-%s" % i
334 334 content = "content-%s" % i
335 335 node = self.getHash(content)
336 336 revisions.append((filename, node, nullid, content))
337 337
338 338 path = self.createPack(revisions).path
339 339
340 340 # Perf of large multi-get
341 341 import gc
342 342 gc.disable()
343 343 pack = self.datapackreader(path)
344 344 for lookupsize in lookupsizes:
345 345 if lookupsize > packsize:
346 346 continue
347 347 random.shuffle(revisions)
348 348 findnodes = [(rev[0], rev[1]) for rev in revisions]
349 349
350 350 start = time.time()
351 351 pack.getmissing(findnodes[:lookupsize])
352 352 elapsed = time.time() - start
353 353 print("%s pack %s lookups = %0.04f" %
354 354 (('%s' % packsize).rjust(7),
355 355 ('%s' % lookupsize).rjust(7),
356 356 elapsed))
357 357
358 358 print("")
359 359 gc.enable()
360 360
361 361 # The perf test is meant to produce output, so we always fail the test
362 362 # so the user sees the output.
363 363 raise RuntimeError("perf test always fails")
364 364
365 365 class datapacktests(datapacktestsbase, unittest.TestCase):
366 366 def __init__(self, *args, **kwargs):
367 367 datapacktestsbase.__init__(self, datapack.datapack, True)
368 368 unittest.TestCase.__init__(self, *args, **kwargs)
369 369
370 370 # TODO:
371 371 # datapack store:
372 372 # - getmissing
373 373 # - GC two packs into one
374 374
375 375 if __name__ == '__main__':
376 if pycompat.iswindows:
377 sys.exit(80) # Skip on Windows
376 378 silenttestrunner.main(__name__)
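For orientation, the datapack write/read cycle exercised above amounts to the sketch below. It is assembled only from calls visible in this test (mutabledatapack, add, close, datapack, getdeltachain); the layout of a delta-chain entry is inferred from how the tests index it, and the helper name is illustrative, so treat this as a sketch rather than a reference for the extension's API.

import hashlib

from mercurial import ui as uimod
from mercurial.node import nullid
from hgext.remotefilelog import datapack

def datapack_roundtrip(packdir, filename, content):
    # Write a single full-text revision (a nullid delta base means full text).
    packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)
    node = hashlib.sha1(content).digest()
    packer.add(filename, node, nullid, content, metadata=None)
    path = packer.close()

    # Read it back. The tests index chain entries as entry[0]=filename,
    # entry[1]=node, entry[3]=delta base node, entry[4]=delta/content.
    pack = datapack.datapack(path)
    chain = pack.getdeltachain(filename, node)
    return chain[0][4]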
@@ -1,276 +1,278 b''
1 1 #!/usr/bin/env python
2 2 from __future__ import absolute_import
3 3
4 4 import hashlib
5 5 import os
6 6 import random
7 7 import shutil
8 8 import stat
9 9 import struct
10 10 import sys
11 11 import tempfile
12 12 import unittest
13 13
14 14 import silenttestrunner
15 15
16 16 from mercurial.node import nullid
17 17 from mercurial import (
18 18 pycompat,
19 19 ui as uimod,
20 20 )
21 21 # Load the local remotefilelog, not the system one
22 22 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
23 23 from hgext.remotefilelog import (
24 24 basepack,
25 25 historypack,
26 26 )
27 27
28 28 class histpacktests(unittest.TestCase):
29 29 def setUp(self):
30 30 self.tempdirs = []
31 31
32 32 def tearDown(self):
33 33 for d in self.tempdirs:
34 34 shutil.rmtree(d)
35 35
36 36 def makeTempDir(self):
37 37 tempdir = tempfile.mkdtemp()
38 38 self.tempdirs.append(tempdir)
39 39 return pycompat.fsencode(tempdir)
40 40
41 41 def getHash(self, content):
42 42 return hashlib.sha1(content).digest()
43 43
44 44 def getFakeHash(self):
45 45 return b''.join(pycompat.bytechr(random.randint(0, 255))
46 46 for _ in range(20))
47 47
48 48 def createPack(self, revisions=None):
49 49 """Creates and returns a historypack containing the specified revisions.
50 50
51 51 `revisions` is a list of tuples, where each tuple contains a filename,
52 52 node, p1node, p2node, linknode, and copyfrom.
53 53 """
54 54 if revisions is None:
55 55 revisions = [("filename", self.getFakeHash(), nullid, nullid,
56 56 self.getFakeHash(), None)]
57 57
58 58 packdir = self.makeTempDir()  # already fsencoded
59 59 packer = historypack.mutablehistorypack(uimod.ui(), packdir,
60 60 version=2)
61 61
62 62 for filename, node, p1, p2, linknode, copyfrom in revisions:
63 63 packer.add(filename, node, p1, p2, linknode, copyfrom)
64 64
65 65 path = packer.close()
66 66 return historypack.historypack(path)
67 67
68 68 def testAddSingle(self):
69 69 """Test putting a single entry into a pack and reading it out.
70 70 """
71 71 filename = "foo"
72 72 node = self.getFakeHash()
73 73 p1 = self.getFakeHash()
74 74 p2 = self.getFakeHash()
75 75 linknode = self.getFakeHash()
76 76
77 77 revisions = [(filename, node, p1, p2, linknode, None)]
78 78 pack = self.createPack(revisions)
79 79
80 80 actual = pack.getancestors(filename, node)[node]
81 81 self.assertEquals(p1, actual[0])
82 82 self.assertEquals(p2, actual[1])
83 83 self.assertEquals(linknode, actual[2])
84 84
85 85 def testAddMultiple(self):
86 86 """Test putting multiple unrelated revisions into a pack and reading
87 87 them out.
88 88 """
89 89 revisions = []
90 90 for i in range(10):
91 91 filename = "foo-%s" % i
92 92 node = self.getFakeHash()
93 93 p1 = self.getFakeHash()
94 94 p2 = self.getFakeHash()
95 95 linknode = self.getFakeHash()
96 96 revisions.append((filename, node, p1, p2, linknode, None))
97 97
98 98 pack = self.createPack(revisions)
99 99
100 100 for filename, node, p1, p2, linknode, copyfrom in revisions:
101 101 actual = pack.getancestors(filename, node)[node]
102 102 self.assertEquals(p1, actual[0])
103 103 self.assertEquals(p2, actual[1])
104 104 self.assertEquals(linknode, actual[2])
105 105 self.assertEquals(copyfrom, actual[3])
106 106
107 107 def testAddAncestorChain(self):
108 108 """Test putting multiple revisions in into a pack and read the ancestor
109 109 chain.
110 110 """
111 111 revisions = []
112 112 filename = b"foo"
113 113 lastnode = nullid
114 114 for i in range(10):
115 115 node = self.getFakeHash()
116 116 revisions.append((filename, node, lastnode, nullid, nullid, None))
117 117 lastnode = node
118 118
119 119 # revisions must be added in topological order, newest first
120 120 revisions = list(reversed(revisions))
121 121 pack = self.createPack(revisions)
122 122
123 123 # Test that the chain has all the entries
124 124 ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
125 125 for filename, node, p1, p2, linknode, copyfrom in revisions:
126 126 ap1, ap2, alinknode, acopyfrom = ancestors[node]
127 127 self.assertEquals(ap1, p1)
128 128 self.assertEquals(ap2, p2)
129 129 self.assertEquals(alinknode, linknode)
130 130 self.assertEquals(acopyfrom, copyfrom)
131 131
132 132 def testPackMany(self):
133 133 """Pack many related and unrelated ancestors.
134 134 """
135 135 # Build a random pack file
136 136 allentries = {}
137 137 ancestorcounts = {}
138 138 revisions = []
139 139 random.seed(0)
140 140 for i in range(100):
141 141 filename = b"filename-%d" % i
142 142 entries = []
143 143 p2 = nullid
144 144 linknode = nullid
145 145 for j in range(random.randint(1, 100)):
146 146 node = self.getFakeHash()
147 147 p1 = nullid
148 148 if len(entries) > 0:
149 149 p1 = entries[random.randint(0, len(entries) - 1)]
150 150 entries.append(node)
151 151 revisions.append((filename, node, p1, p2, linknode, None))
152 152 allentries[(filename, node)] = (p1, p2, linknode)
153 153 if p1 == nullid:
154 154 ancestorcounts[(filename, node)] = 1
155 155 else:
156 156 newcount = ancestorcounts[(filename, p1)] + 1
157 157 ancestorcounts[(filename, node)] = newcount
158 158
159 159 # Must add file entries in reverse topological order
160 160 revisions = list(reversed(revisions))
161 161 pack = self.createPack(revisions)
162 162
163 163 # Verify the pack contents
164 164 for (filename, node), (p1, p2, lastnode) in allentries.items():
165 165 ancestors = pack.getancestors(filename, node)
166 166 self.assertEquals(ancestorcounts[(filename, node)],
167 167 len(ancestors))
168 168 for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
169 169 ep1, ep2, elinknode = allentries[(filename, anode)]
170 170 self.assertEquals(ap1, ep1)
171 171 self.assertEquals(ap2, ep2)
172 172 self.assertEquals(alinknode, elinknode)
173 173 self.assertEquals(copyfrom, None)
174 174
175 175 def testGetNodeInfo(self):
176 176 revisions = []
177 177 filename = b"foo"
178 178 lastnode = nullid
179 179 for i in range(10):
180 180 node = self.getFakeHash()
181 181 revisions.append((filename, node, lastnode, nullid, nullid, None))
182 182 lastnode = node
183 183
184 184 pack = self.createPack(revisions)
185 185
186 186 # Test that getnodeinfo returns the expected results
187 187 for filename, node, p1, p2, linknode, copyfrom in revisions:
188 188 ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
189 189 self.assertEquals(ap1, p1)
190 190 self.assertEquals(ap2, p2)
191 191 self.assertEquals(alinknode, linknode)
192 192 self.assertEquals(acopyfrom, copyfrom)
193 193
194 194 def testGetMissing(self):
195 195 """Test the getmissing() api.
196 196 """
197 197 revisions = []
198 198 filename = b"foo"
199 199 for i in range(10):
200 200 node = self.getFakeHash()
201 201 p1 = self.getFakeHash()
202 202 p2 = self.getFakeHash()
203 203 linknode = self.getFakeHash()
204 204 revisions.append((filename, node, p1, p2, linknode, None))
205 205
206 206 pack = self.createPack(revisions)
207 207
208 208 missing = pack.getmissing([(filename, revisions[0][1])])
209 209 self.assertFalse(missing)
210 210
211 211 missing = pack.getmissing([(filename, revisions[0][1]),
212 212 (filename, revisions[1][1])])
213 213 self.assertFalse(missing)
214 214
215 215 fakenode = self.getFakeHash()
216 216 missing = pack.getmissing([(filename, revisions[0][1]),
217 217 (filename, fakenode)])
218 218 self.assertEquals(missing, [(filename, fakenode)])
219 219
220 220 # Test getmissing on a non-existent filename
221 221 missing = pack.getmissing([("bar", fakenode)])
222 222 self.assertEquals(missing, [("bar", fakenode)])
223 223
224 224 def testAddThrows(self):
225 225 pack = self.createPack()
226 226
227 227 try:
228 228 pack.add(b'filename', nullid, nullid, nullid, nullid, None)
229 229 self.assertTrue(False, "historypack.add should throw")
230 230 except RuntimeError:
231 231 pass
232 232
233 233 def testBadVersionThrows(self):
234 234 pack = self.createPack()
235 235 path = pack.path + '.histpack'
236 236 with open(path, 'rb') as f:
237 237 raw = f.read()
238 238 raw = struct.pack('!B', 255) + raw[1:]
239 239 os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
240 240 with open(path, 'w+b') as f:
241 241 f.write(raw)
242 242
243 243 try:
244 244 pack = historypack.historypack(pack.path)
245 245 self.assertTrue(False, "bad version number should have thrown")
246 246 except RuntimeError:
247 247 pass
248 248
249 249 def testLargePack(self):
250 250 """Test creating and reading from a large pack with over X entries.
251 251 This causes it to use a 2^16 fanout table instead."""
252 252 total = basepack.SMALLFANOUTCUTOFF + 1
253 253 revisions = []
254 254 for i in pycompat.xrange(total):
255 255 filename = b"foo-%d" % i
256 256 node = self.getFakeHash()
257 257 p1 = self.getFakeHash()
258 258 p2 = self.getFakeHash()
259 259 linknode = self.getFakeHash()
260 260 revisions.append((filename, node, p1, p2, linknode, None))
261 261
262 262 pack = self.createPack(revisions)
263 263 self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
264 264
265 265 for filename, node, p1, p2, linknode, copyfrom in revisions:
266 266 actual = pack.getancestors(filename, node)[node]
267 267 self.assertEquals(p1, actual[0])
268 268 self.assertEquals(p2, actual[1])
269 269 self.assertEquals(linknode, actual[2])
270 270 self.assertEquals(copyfrom, actual[3])
271 271 # TODO:
272 272 # histpack store:
273 273 # - repack two packs into one
274 274
275 275 if __name__ == '__main__':
276 if pycompat.iswindows:
277 sys.exit(80) # Skip on Windows
276 278 silenttestrunner.main(__name__)
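The historypack file mirrors that shape. Below is a sketch of the corresponding cycle, again reconstructed solely from the calls in this test (the helper name is illustrative): entries must be added newest-first, and getancestors() maps each node to a (p1, p2, linknode, copyfrom) tuple.

from mercurial import ui as uimod
from mercurial.node import nullid
from hgext.remotefilelog import historypack

def histpack_roundtrip(packdir, filename, node, p1, linknode):
    # Entries go in reverse topological order (newest first), as the
    # comment in testAddAncestorChain above notes.
    packer = historypack.mutablehistorypack(uimod.ui(), packdir, version=2)
    packer.add(filename, node, p1, nullid, linknode, None)  # copyfrom=None
    path = packer.close()

    pack = historypack.historypack(path)
    return pack.getancestors(filename, node)[node]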