tests: use bytes for file I/O...
Gregory Szorc
r41353:abac73ee default
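
The change below touches only testBadVersionThrows: the '.histpack' suffix becomes a bytes literal and the pack file is reopened in 'rb'/'wb+' mode, so the corruption round-trip reads and writes bytes on both Python 2 and Python 3. A minimal, standalone sketch of that pattern follows (the temporary stand-in file is illustrative and not part of the test, which corrupts a pack written by historypack.mutablehistorypack):

    import os
    import struct
    import tempfile

    # Create a tiny stand-in pack file so the sketch is self-contained
    # (illustrative only; not the format the real test uses).
    fd, strpath = tempfile.mkstemp(suffix='.histpack')
    path = strpath.encode('utf-8')          # pack paths in the test are bytes
    with os.fdopen(fd, 'wb') as f:
        f.write(struct.pack('!B', 2) + b'fake pack payload')

    with open(path, 'rb') as f:             # binary read returns bytes on 2 and 3
        raw = f.read()

    raw = struct.pack('!B', 255) + raw[1:]  # clobber the version byte

    with open(path, 'wb+') as f:            # binary write accepts the bytes payload
        f.write(raw)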
@@ -1,278 +1,278 @@
 #!/usr/bin/env python
 from __future__ import absolute_import
 
 import hashlib
 import os
 import random
 import shutil
 import stat
 import struct
 import sys
 import tempfile
 import unittest
 
 import silenttestrunner
 
 from mercurial.node import nullid
 from mercurial import (
     pycompat,
     ui as uimod,
 )
 # Load the local remotefilelog, not the system one
 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
 from hgext.remotefilelog import (
     basepack,
     historypack,
 )
 
 class histpacktests(unittest.TestCase):
     def setUp(self):
         self.tempdirs = []
 
     def tearDown(self):
         for d in self.tempdirs:
             shutil.rmtree(d)
 
     def makeTempDir(self):
         tempdir = tempfile.mkdtemp()
         self.tempdirs.append(tempdir)
         return pycompat.fsencode(tempdir)
 
     def getHash(self, content):
         return hashlib.sha1(content).digest()
 
     def getFakeHash(self):
         return b''.join(pycompat.bytechr(random.randint(0, 255))
                         for _ in range(20))
 
     def createPack(self, revisions=None):
         """Creates and returns a historypack containing the specified revisions.
 
         `revisions` is a list of tuples, where each tuple contains a filename,
         node, p1node, p2node, linknode, and copyfrom.
         """
         if revisions is None:
             revisions = [(b"filename", self.getFakeHash(), nullid, nullid,
                           self.getFakeHash(), None)]
 
         packdir = pycompat.fsencode(self.makeTempDir())
         packer = historypack.mutablehistorypack(uimod.ui(), packdir,
                                                 version=2)
 
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             packer.add(filename, node, p1, p2, linknode, copyfrom)
 
         path = packer.close()
         return historypack.historypack(path)
 
     def testAddSingle(self):
         """Test putting a single entry into a pack and reading it out.
         """
         filename = b"foo"
         node = self.getFakeHash()
         p1 = self.getFakeHash()
         p2 = self.getFakeHash()
         linknode = self.getFakeHash()
 
         revisions = [(filename, node, p1, p2, linknode, None)]
         pack = self.createPack(revisions)
 
         actual = pack.getancestors(filename, node)[node]
         self.assertEqual(p1, actual[0])
         self.assertEqual(p2, actual[1])
         self.assertEqual(linknode, actual[2])
 
     def testAddMultiple(self):
         """Test putting multiple unrelated revisions into a pack and reading
         them out.
         """
         revisions = []
         for i in range(10):
             filename = b"foo-%d" % i
             node = self.getFakeHash()
             p1 = self.getFakeHash()
             p2 = self.getFakeHash()
             linknode = self.getFakeHash()
             revisions.append((filename, node, p1, p2, linknode, None))
 
         pack = self.createPack(revisions)
 
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             actual = pack.getancestors(filename, node)[node]
             self.assertEqual(p1, actual[0])
             self.assertEqual(p2, actual[1])
             self.assertEqual(linknode, actual[2])
             self.assertEqual(copyfrom, actual[3])
 
     def testAddAncestorChain(self):
         """Test putting multiple revisions into a pack and reading the
         ancestor chain.
         """
         revisions = []
         filename = b"foo"
         lastnode = nullid
         for i in range(10):
             node = self.getFakeHash()
             revisions.append((filename, node, lastnode, nullid, nullid, None))
             lastnode = node
 
         # revisions must be added in topological order, newest first
         revisions = list(reversed(revisions))
         pack = self.createPack(revisions)
 
         # Test that the chain has all the entries
         ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             ap1, ap2, alinknode, acopyfrom = ancestors[node]
             self.assertEqual(ap1, p1)
             self.assertEqual(ap2, p2)
             self.assertEqual(alinknode, linknode)
             self.assertEqual(acopyfrom, copyfrom)
 
     def testPackMany(self):
         """Pack many related and unrelated ancestors.
         """
         # Build a random pack file
         allentries = {}
         ancestorcounts = {}
         revisions = []
         random.seed(0)
         for i in range(100):
             filename = b"filename-%d" % i
             entries = []
             p2 = nullid
             linknode = nullid
             for j in range(random.randint(1, 100)):
                 node = self.getFakeHash()
                 p1 = nullid
                 if len(entries) > 0:
                     p1 = entries[random.randint(0, len(entries) - 1)]
                 entries.append(node)
                 revisions.append((filename, node, p1, p2, linknode, None))
                 allentries[(filename, node)] = (p1, p2, linknode)
                 if p1 == nullid:
                     ancestorcounts[(filename, node)] = 1
                 else:
                     newcount = ancestorcounts[(filename, p1)] + 1
                     ancestorcounts[(filename, node)] = newcount
 
         # Must add file entries in reverse topological order
         revisions = list(reversed(revisions))
         pack = self.createPack(revisions)
 
         # Verify the pack contents
         for (filename, node), (p1, p2, lastnode) in allentries.items():
             ancestors = pack.getancestors(filename, node)
             self.assertEqual(ancestorcounts[(filename, node)],
                              len(ancestors))
             for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
                 ep1, ep2, elinknode = allentries[(filename, anode)]
                 self.assertEqual(ap1, ep1)
                 self.assertEqual(ap2, ep2)
                 self.assertEqual(alinknode, elinknode)
                 self.assertEqual(copyfrom, None)
 
     def testGetNodeInfo(self):
         revisions = []
         filename = b"foo"
         lastnode = nullid
         for i in range(10):
             node = self.getFakeHash()
             revisions.append((filename, node, lastnode, nullid, nullid, None))
             lastnode = node
 
         pack = self.createPack(revisions)
 
         # Test that getnodeinfo returns the expected results
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
             self.assertEqual(ap1, p1)
             self.assertEqual(ap2, p2)
             self.assertEqual(alinknode, linknode)
             self.assertEqual(acopyfrom, copyfrom)
 
     def testGetMissing(self):
         """Test the getmissing() api.
         """
         revisions = []
         filename = b"foo"
         for i in range(10):
             node = self.getFakeHash()
             p1 = self.getFakeHash()
             p2 = self.getFakeHash()
             linknode = self.getFakeHash()
             revisions.append((filename, node, p1, p2, linknode, None))
 
         pack = self.createPack(revisions)
 
         missing = pack.getmissing([(filename, revisions[0][1])])
         self.assertFalse(missing)
 
         missing = pack.getmissing([(filename, revisions[0][1]),
                                    (filename, revisions[1][1])])
         self.assertFalse(missing)
 
         fakenode = self.getFakeHash()
         missing = pack.getmissing([(filename, revisions[0][1]),
                                    (filename, fakenode)])
         self.assertEqual(missing, [(filename, fakenode)])
 
         # Test getmissing on a non-existent filename
         missing = pack.getmissing([(b"bar", fakenode)])
         self.assertEqual(missing, [(b"bar", fakenode)])
 
     def testAddThrows(self):
         pack = self.createPack()
 
         try:
             pack.add(b'filename', nullid, nullid, nullid, nullid, None)
             self.assertTrue(False, "historypack.add should throw")
         except RuntimeError:
             pass
 
     def testBadVersionThrows(self):
         pack = self.createPack()
-        path = pack.path + '.histpack'
-        with open(path) as f:
+        path = pack.path + b'.histpack'
+        with open(path, 'rb') as f:
             raw = f.read()
         raw = struct.pack('!B', 255) + raw[1:]
         os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
-        with open(path, 'w+') as f:
+        with open(path, 'wb+') as f:
             f.write(raw)
 
         try:
             pack = historypack.historypack(pack.path)
             self.assertTrue(False, "bad version number should have thrown")
         except RuntimeError:
             pass
 
     def testLargePack(self):
         """Test creating and reading from a large pack with more than
         SMALLFANOUTCUTOFF entries. This causes it to use a 2^16 fanout table
         instead."""
         total = basepack.SMALLFANOUTCUTOFF + 1
         revisions = []
         for i in pycompat.xrange(total):
             filename = b"foo-%d" % i
             node = self.getFakeHash()
             p1 = self.getFakeHash()
             p2 = self.getFakeHash()
             linknode = self.getFakeHash()
             revisions.append((filename, node, p1, p2, linknode, None))
 
         pack = self.createPack(revisions)
         self.assertEqual(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
 
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             actual = pack.getancestors(filename, node)[node]
             self.assertEqual(p1, actual[0])
             self.assertEqual(p2, actual[1])
             self.assertEqual(linknode, actual[2])
             self.assertEqual(copyfrom, actual[3])
 # TODO:
 # histpack store:
 # - repack two packs into one
 
 if __name__ == '__main__':
     if pycompat.iswindows:
         sys.exit(80) # Skip on Windows
     silenttestrunner.main(__name__)
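
For readers skimming the diff, the API the tests exercise boils down to: build a mutablehistorypack, add (filename, node, p1, p2, linknode, copyfrom) entries, close it, and read it back through historypack.getancestors. A condensed sketch distilled from createPack and testAddSingle above (temp-dir cleanup is omitted for brevity; run it from the tests directory so the sys.path hack resolves):

    import os
    import random
    import sys
    import tempfile

    from mercurial import pycompat, ui as uimod
    from mercurial.node import nullid

    # Same local-extension path hack the test file uses.
    sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
    from hgext.remotefilelog import historypack

    def fakehash():
        return b''.join(pycompat.bytechr(random.randint(0, 255))
                        for _ in range(20))

    packdir = pycompat.fsencode(tempfile.mkdtemp())
    packer = historypack.mutablehistorypack(uimod.ui(), packdir, version=2)

    filename, node, linknode = b'foo', fakehash(), fakehash()
    packer.add(filename, node, nullid, nullid, linknode, None)

    pack = historypack.historypack(packer.close())
    p1, p2, link, copyfrom = pack.getancestors(filename, node)[node]
    assert (p1, p2, link, copyfrom) == (nullid, nullid, linknode, None)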