tests: fix up uses of xrange in remotefilelog tests for py3...
Augie Fackler
r41290:2888d12b default
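
The change is mechanical: each Python 2 only use of the builtin xrange is either shimmed locally (in the standalone heredoc helper script in the first hunk, which cannot import Mercurial) or replaced with pycompat.xrange (in the unittest file in the second hunk, which can). A minimal sketch of both patterns as applied in this diff; count_up is an illustrative name, not part of the change:

import sys

# Pattern 1: inline shim for standalone scripts. On Python 3 the builtin
# is named `range`, so bind `xrange` to it once at startup.
if sys.version_info[0] > 2:
    xrange = range

def count_up(n):
    # works unchanged on both Python 2 and Python 3
    return [i for i in xrange(n)]

# Pattern 2: code that can import Mercurial uses the compatibility layer
# instead: `from mercurial import pycompat`, then `pycompat.xrange(n)`,
# which resolves to the correct builtin for the running interpreter.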
tests/test-remotefilelog-cacheprocess.t
@@ -1,123 +1,125 @@
1 1 #require no-windows
2 2
3 3 $ . "$TESTDIR/remotefilelog-library.sh"
4 4
5 5 $ hg init repo
6 6 $ cd repo
7 7 $ cat >> .hg/hgrc <<EOF
8 8 > [remotefilelog]
9 9 > server=True
10 10 > EOF
11 11 $ echo x > x
12 12 $ echo y > y
13 13 $ echo z > z
14 14 $ hg commit -qAm xy
15 15 $ cd ..
16 16
17 17 $ cat > cacheprocess-logger.py <<EOF
18 18 > import os
19 19 > import shutil
20 20 > import sys
21 > if sys.version_info[0] > 2:
22 > xrange = range
21 23 > f = open('$TESTTMP/cachelog.log', 'w')
22 24 > srccache = os.path.join('$TESTTMP', 'oldhgcache')
23 25 > def log(message):
24 26 > f.write(message)
25 27 > f.flush()
26 28 > destcache = sys.argv[-1]
27 29 > try:
28 30 > while True:
29 31 > cmd = sys.stdin.readline().strip()
30 32 > log('got command %r\n' % cmd)
31 33 > if cmd == 'exit':
32 34 > sys.exit(0)
33 35 > elif cmd == 'get':
34 36 > count = int(sys.stdin.readline())
35 37 > log('client wants %r blobs\n' % count)
36 38 > wants = []
37 39 > for _ in xrange(count):
38 40 > key = sys.stdin.readline()[:-1]
39 41 > wants.append(key)
40 42 > if '\0' in key:
41 43 > _, key = key.split('\0')
42 44 > srcpath = os.path.join(srccache, key)
43 45 > if os.path.exists(srcpath):
44 46 > dest = os.path.join(destcache, key)
45 47 > destdir = os.path.dirname(dest)
46 48 > if not os.path.exists(destdir):
47 49 > os.makedirs(destdir)
48 50 > shutil.copyfile(srcpath, dest)
49 51 > else:
50 52 > # report a cache miss
51 53 > sys.stdout.write(key + '\n')
52 54 > sys.stdout.write('0\n')
53 55 > for key in sorted(wants):
54 56 > log('requested %r\n' % key)
55 57 > sys.stdout.flush()
56 58 > elif cmd == 'set':
57 59 > assert False, 'todo writing'
58 60 > else:
59 61 > assert False, 'unknown command! %r' % cmd
60 62 > except Exception as e:
61 63 > log('Exception! %r\n' % e)
62 64 > raise
63 65 > EOF
64 66
65 67 $ cat >> $HGRCPATH <<EOF
66 68 > [remotefilelog]
67 69 > cacheprocess = python $TESTTMP/cacheprocess-logger.py
68 70 > EOF
69 71
70 72 Test cache keys and cache misses.
71 73 $ hgcloneshallow ssh://user@dummy/repo clone -q
72 74 3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over *s (glob)
73 75 $ cat cachelog.log
74 76 got command 'get'
75 77 client wants 3 blobs
76 78 requested 'master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
77 79 requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
78 80 requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
79 81 got command 'set'
80 82 Exception! AssertionError('todo writing',)
81 83
82 84 Test cache hits.
83 85 $ mv hgcache oldhgcache
84 86 $ rm cachelog.log
85 87 $ hgcloneshallow ssh://user@dummy/repo clone-cachehit -q
86 88 3 files fetched over 1 fetches - (0 misses, 100.00% hit ratio) over *s (glob)
87 89 $ cat cachelog.log | grep -v exit
88 90 got command 'get'
89 91 client wants 3 blobs
90 92 requested 'master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
91 93 requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
92 94 requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
93 95
94 96 $ cat >> $HGRCPATH <<EOF
95 97 > [remotefilelog]
96 98 > cacheprocess.includepath = yes
97 99 > EOF
98 100
99 101 Test cache keys and cache misses with includepath.
100 102 $ rm -r hgcache oldhgcache
101 103 $ rm cachelog.log
102 104 $ hgcloneshallow ssh://user@dummy/repo clone-withpath -q
103 105 3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over *s (glob)
104 106 $ cat cachelog.log
105 107 got command 'get'
106 108 client wants 3 blobs
107 109 requested 'x\x00master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
108 110 requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
109 111 requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
110 112 got command 'set'
111 113 Exception! AssertionError('todo writing',)
112 114
113 115 Test cache hits with includepath.
114 116 $ mv hgcache oldhgcache
115 117 $ rm cachelog.log
116 118 $ hgcloneshallow ssh://user@dummy/repo clone-withpath-cachehit -q
117 119 3 files fetched over 1 fetches - (0 misses, 100.00% hit ratio) over *s (glob)
118 120 $ cat cachelog.log | grep -v exit
119 121 got command 'get'
120 122 client wants 3 blobs
121 123 requested 'x\x00master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0'
122 124 requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
123 125 requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
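
For context, the protocol the logger script above implements is line-oriented over stdin/stdout: a command line ('get', 'set', or 'exit'); for 'get', a count line followed by one cache key per line. The process copies hits from the source cache into the destination directory and echoes each miss back, terminated by a sentinel '0' line. With cacheprocess.includepath enabled, each key carries a file-path prefix separated by a NUL byte, as the second half of the test shows. Below is a minimal sketch of one round trip against that script; the framing is reconstructed from what the script parses, not from remotefilelog documentation, the key names are made up, and it assumes the script was written out by the test harness so $TESTTMP is already expanded:

import subprocess

proc = subprocess.Popen(
    ['python', 'cacheprocess-logger.py', '/tmp/destcache'],
    stdin=subprocess.PIPE, stdout=subprocess.PIPE)

proc.stdin.write(b'get\n')         # command
proc.stdin.write(b'2\n')           # number of keys that follow
proc.stdin.write(b'some-key-1\n')  # one key per line (illustrative names)
proc.stdin.write(b'some-key-2\n')
proc.stdin.flush()

# Hits are copied into /tmp/destcache; each miss comes back as a key
# line, and a lone '0' line terminates the response.
line = proc.stdout.readline()
while line.strip() != b'0':
    print('cache miss:', line.strip())
    line = proc.stdout.readline()

proc.stdin.write(b'exit\n')
proc.stdin.flush()
proc.wait()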
tests/test-remotefilelog-datapack.py
@@ -1,375 +1,376 @@
1 1 #!/usr/bin/env python
2 2 from __future__ import absolute_import, print_function
3 3
4 4 import hashlib
5 5 import os
6 6 import random
7 7 import shutil
8 8 import stat
9 9 import struct
10 10 import sys
11 11 import tempfile
12 12 import time
13 13 import unittest
14 14
15 15 import silenttestrunner
16 16
17 17 # Load the local remotefilelog, not the system one
18 18 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
19 19 from mercurial.node import nullid
20 20 from mercurial import (
21 pycompat,
21 22 ui as uimod,
22 23 )
23 24 from hgext.remotefilelog import (
24 25 basepack,
25 26 constants,
26 27 datapack,
27 28 )
28 29
29 30 class datapacktestsbase(object):
30 31 def __init__(self, datapackreader, paramsavailable):
31 32 self.datapackreader = datapackreader
32 33 self.paramsavailable = paramsavailable
33 34
34 35 def setUp(self):
35 36 self.tempdirs = []
36 37
37 38 def tearDown(self):
38 39 for d in self.tempdirs:
39 40 shutil.rmtree(d)
40 41
41 42 def makeTempDir(self):
42 43 tempdir = tempfile.mkdtemp()
43 44 self.tempdirs.append(tempdir)
44 45 return tempdir
45 46
46 47 def getHash(self, content):
47 48 return hashlib.sha1(content).digest()
48 49
49 50 def getFakeHash(self):
50 51 return ''.join(chr(random.randint(0, 255)) for _ in range(20))
51 52
52 53 def createPack(self, revisions=None, packdir=None):
53 54 if revisions is None:
54 55 revisions = [("filename", self.getFakeHash(), nullid, "content")]
55 56
56 57 if packdir is None:
57 58 packdir = self.makeTempDir()
58 59
59 60 packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)
60 61
61 62 for args in revisions:
62 63 filename, node, base, content = args[0:4]
63 64 # meta is optional
64 65 meta = None
65 66 if len(args) > 4:
66 67 meta = args[4]
67 68 packer.add(filename, node, base, content, metadata=meta)
68 69
69 70 path = packer.close()
70 71 return self.datapackreader(path)
71 72
72 73 def _testAddSingle(self, content):
73 74 """Test putting a simple blob into a pack and reading it out.
74 75 """
75 76 filename = "foo"
76 77 node = self.getHash(content)
77 78
78 79 revisions = [(filename, node, nullid, content)]
79 80 pack = self.createPack(revisions)
80 81 if self.paramsavailable:
81 82 self.assertEquals(pack.params.fanoutprefix,
82 83 basepack.SMALLFANOUTPREFIX)
83 84
84 85 chain = pack.getdeltachain(filename, node)
85 86 self.assertEquals(content, chain[0][4])
86 87
87 88 def testAddSingle(self):
88 89 self._testAddSingle('')
89 90
90 91 def testAddSingleEmpty(self):
91 92 self._testAddSingle('abcdef')
92 93
93 94 def testAddMultiple(self):
94 95 """Test putting multiple unrelated blobs into a pack and reading them
95 96 out.
96 97 """
97 98 revisions = []
98 99 for i in range(10):
99 100 filename = "foo%s" % i
100 101 content = "abcdef%s" % i
101 102 node = self.getHash(content)
102 103 revisions.append((filename, node, self.getFakeHash(), content))
103 104
104 105 pack = self.createPack(revisions)
105 106
106 107 for filename, node, base, content in revisions:
107 108 entry = pack.getdelta(filename, node)
108 109 self.assertEquals((content, filename, base, {}), entry)
109 110
110 111 chain = pack.getdeltachain(filename, node)
111 112 self.assertEquals(content, chain[0][4])
112 113
113 114 def testAddDeltas(self):
114 115 """Test putting multiple delta blobs into a pack and read the chain.
115 116 """
116 117 revisions = []
117 118 filename = "foo"
118 119 lastnode = nullid
119 120 for i in range(10):
120 121 content = "abcdef%s" % i
121 122 node = self.getHash(content)
122 123 revisions.append((filename, node, lastnode, content))
123 124 lastnode = node
124 125
125 126 pack = self.createPack(revisions)
126 127
127 128 entry = pack.getdelta(filename, revisions[0][1])
128 129 realvalue = (revisions[0][3], filename, revisions[0][2], {})
129 130 self.assertEquals(entry, realvalue)
130 131
131 132 # Test that the chain for the final entry has all the others
132 133 chain = pack.getdeltachain(filename, node)
133 134 for i in range(10):
134 135 content = "abcdef%s" % i
135 136 self.assertEquals(content, chain[-i - 1][4])
136 137
137 138 def testPackMany(self):
138 139 """Pack many related and unrelated objects.
139 140 """
140 141 # Build a random pack file
141 142 revisions = []
142 143 blobs = {}
143 144 random.seed(0)
144 145 for i in range(100):
145 146 filename = "filename-%s" % i
146 147 filerevs = []
147 148 for j in range(random.randint(1, 100)):
148 149 content = "content-%s" % j
149 150 node = self.getHash(content)
150 151 lastnode = nullid
151 152 if len(filerevs) > 0:
152 153 lastnode = filerevs[random.randint(0, len(filerevs) - 1)]
153 154 filerevs.append(node)
154 155 blobs[(filename, node, lastnode)] = content
155 156 revisions.append((filename, node, lastnode, content))
156 157
157 158 pack = self.createPack(revisions)
158 159
159 160 # Verify the pack contents
160 161 for (filename, node, lastnode), content in sorted(blobs.iteritems()):
161 162 chain = pack.getdeltachain(filename, node)
162 163 for entry in chain:
163 164 expectedcontent = blobs[(entry[0], entry[1], entry[3])]
164 165 self.assertEquals(entry[4], expectedcontent)
165 166
166 167 def testPackMetadata(self):
167 168 revisions = []
168 169 for i in range(100):
169 170 filename = '%s.txt' % i
170 171 content = 'put-something-here \n' * i
171 172 node = self.getHash(content)
172 173 meta = {constants.METAKEYFLAG: i ** 4,
173 174 constants.METAKEYSIZE: len(content),
174 175 'Z': 'random_string',
175 176 '_': '\0' * i}
176 177 revisions.append((filename, node, nullid, content, meta))
177 178 pack = self.createPack(revisions)
178 179 for name, node, x, content, origmeta in revisions:
179 180 parsedmeta = pack.getmeta(name, node)
180 181 # flag == 0 should be optimized out
181 182 if origmeta[constants.METAKEYFLAG] == 0:
182 183 del origmeta[constants.METAKEYFLAG]
183 184 self.assertEquals(parsedmeta, origmeta)
184 185
185 186 def testGetMissing(self):
186 187 """Test the getmissing() api.
187 188 """
188 189 revisions = []
189 190 filename = "foo"
190 191 lastnode = nullid
191 192 for i in range(10):
192 193 content = "abcdef%s" % i
193 194 node = self.getHash(content)
194 195 revisions.append((filename, node, lastnode, content))
195 196 lastnode = node
196 197
197 198 pack = self.createPack(revisions)
198 199
199 200 missing = pack.getmissing([("foo", revisions[0][1])])
200 201 self.assertFalse(missing)
201 202
202 203 missing = pack.getmissing([("foo", revisions[0][1]),
203 204 ("foo", revisions[1][1])])
204 205 self.assertFalse(missing)
205 206
206 207 fakenode = self.getFakeHash()
207 208 missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)])
208 209 self.assertEquals(missing, [("foo", fakenode)])
209 210
210 211 def testAddThrows(self):
211 212 pack = self.createPack()
212 213
213 214 try:
214 215 pack.add('filename', nullid, 'contents')
215 216 self.assertTrue(False, "datapack.add should throw")
216 217 except RuntimeError:
217 218 pass
218 219
219 220 def testBadVersionThrows(self):
220 221 pack = self.createPack()
221 222 path = pack.path + '.datapack'
222 223 with open(path) as f:
223 224 raw = f.read()
224 225 raw = struct.pack('!B', 255) + raw[1:]
225 226 os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
226 227 with open(path, 'w+') as f:
227 228 f.write(raw)
228 229
229 230 try:
230 231 pack = self.datapackreader(pack.path)
231 232 self.assertTrue(False, "bad version number should have thrown")
232 233 except RuntimeError:
233 234 pass
234 235
235 236 def testMissingDeltabase(self):
236 237 fakenode = self.getFakeHash()
237 238 revisions = [("filename", fakenode, self.getFakeHash(), "content")]
238 239 pack = self.createPack(revisions)
239 240 chain = pack.getdeltachain("filename", fakenode)
240 241 self.assertEquals(len(chain), 1)
241 242
242 243 def testLargePack(self):
243 244 """Test creating and reading from a large pack with over X entries.
244 245 This causes it to use a 2^16 fanout table instead."""
245 246 revisions = []
246 247 blobs = {}
247 248 total = basepack.SMALLFANOUTCUTOFF + 1
248 for i in xrange(total):
249 for i in pycompat.xrange(total):
249 250 filename = "filename-%s" % i
250 251 content = filename
251 252 node = self.getHash(content)
252 253 blobs[(filename, node)] = content
253 254 revisions.append((filename, node, nullid, content))
254 255
255 256 pack = self.createPack(revisions)
256 257 if self.paramsavailable:
257 258 self.assertEquals(pack.params.fanoutprefix,
258 259 basepack.LARGEFANOUTPREFIX)
259 260
260 261 for (filename, node), content in blobs.iteritems():
261 262 actualcontent = pack.getdeltachain(filename, node)[0][4]
262 263 self.assertEquals(actualcontent, content)
263 264
264 265 def testPacksCache(self):
265 266 """Test that we remember the most recent packs while fetching the delta
266 267 chain."""
267 268
268 269 packdir = self.makeTempDir()
269 270 deltachains = []
270 271
271 272 numpacks = 10
272 273 revisionsperpack = 100
273 274
274 275 for i in range(numpacks):
275 276 chain = []
276 277 revision = (str(i), self.getFakeHash(), nullid, "content")
277 278
278 279 for _ in range(revisionsperpack):
279 280 chain.append(revision)
280 281 revision = (
281 282 str(i),
282 283 self.getFakeHash(),
283 284 revision[1],
284 285 self.getFakeHash()
285 286 )
286 287
287 288 self.createPack(chain, packdir)
288 289 deltachains.append(chain)
289 290
290 291 class testdatapackstore(datapack.datapackstore):
291 292 # Ensures that we are not keeping everything in the cache.
292 293 DEFAULTCACHESIZE = numpacks / 2
293 294
294 295 store = testdatapackstore(uimod.ui(), packdir)
295 296
296 297 random.shuffle(deltachains)
297 298 for randomchain in deltachains:
298 299 revision = random.choice(randomchain)
299 300 chain = store.getdeltachain(revision[0], revision[1])
300 301
301 302 mostrecentpack = next(iter(store.packs), None)
302 303 self.assertEquals(
303 304 mostrecentpack.getdeltachain(revision[0], revision[1]),
304 305 chain
305 306 )
306 307
307 308 self.assertEquals(randomchain.index(revision) + 1, len(chain))
308 309
309 310 # perf test off by default since it's slow
310 311 def _testIndexPerf(self):
311 312 random.seed(0)
312 313 print("Multi-get perf test")
313 314 packsizes = [
314 315 100,
315 316 10000,
316 317 100000,
317 318 500000,
318 319 1000000,
319 320 3000000,
320 321 ]
321 322 lookupsizes = [
322 323 10,
323 324 100,
324 325 1000,
325 326 10000,
326 327 100000,
327 328 1000000,
328 329 ]
329 330 for packsize in packsizes:
330 331 revisions = []
331 for i in xrange(packsize):
332 for i in pycompat.xrange(packsize):
332 333 filename = "filename-%s" % i
333 334 content = "content-%s" % i
334 335 node = self.getHash(content)
335 336 revisions.append((filename, node, nullid, content))
336 337
337 338 path = self.createPack(revisions).path
338 339
339 340 # Perf of large multi-get
340 341 import gc
341 342 gc.disable()
342 343 pack = self.datapackreader(path)
343 344 for lookupsize in lookupsizes:
344 345 if lookupsize > packsize:
345 346 continue
346 347 random.shuffle(revisions)
347 348 findnodes = [(rev[0], rev[1]) for rev in revisions]
348 349
349 350 start = time.time()
350 351 pack.getmissing(findnodes[:lookupsize])
351 352 elapsed = time.time() - start
352 353 print ("%s pack %s lookups = %0.04f" %
353 354 (('%s' % packsize).rjust(7),
354 355 ('%s' % lookupsize).rjust(7),
355 356 elapsed))
356 357
357 358 print("")
358 359 gc.enable()
359 360
360 361 # The perf test is meant to produce output, so we always fail the test
361 362 # so the user sees the output.
362 363 raise RuntimeError("perf test always fails")
363 364
364 365 class datapacktests(datapacktestsbase, unittest.TestCase):
365 366 def __init__(self, *args, **kwargs):
366 367 datapacktestsbase.__init__(self, datapack.datapack, True)
367 368 unittest.TestCase.__init__(self, *args, **kwargs)
368 369
369 370 # TODO:
370 371 # datapack store:
371 372 # - getmissing
372 373 # - GC two packs into one
373 374
374 375 if __name__ == '__main__':
375 376 silenttestrunner.main(__name__)
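
The unittest half drives the datapack API with revision tuples of the form (filename, node, deltabase, content[, metadata]), as createPack expects. A minimal standalone sketch of the write/read cycle those tests rely on, assuming the local hgext.remotefilelog is on sys.path as in the test header, and run under Python 2 to match the file's remaining str and iteritems usage; the entry layout in the final comment is inferred from the test's assertions:

import hashlib
import os
import sys
import tempfile

sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
from mercurial.node import nullid
from mercurial import ui as uimod
from hgext.remotefilelog import datapack

packdir = tempfile.mkdtemp()
packer = datapack.mutabledatapack(uimod.ui(), packdir, version=2)

content = 'hello'
node = hashlib.sha1(content).digest()
# a deltabase of nullid means the entry stores a full text, not a delta
packer.add('hello.txt', node, nullid, content)
path = packer.close()

# read the pack back; chain entries look like
# (filename, node, deltabase filename, deltabase node, delta),
# so for a full-text entry the delta at index 4 is the content itself
pack = datapack.datapack(path)
chain = pack.getdeltachain('hello.txt', node)
assert chain[0][4] == content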