py3: convert strings to bytes in tests/test-remotefilelog-histpack.py...
Pulkit Goyal
r40764:9446d5aa default
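For context, the sketch below (not part of the patch) illustrates the Python 2/3 difference this change works around: on Python 3, chr() returns a one-character unicode str and bare string literals are text, while the pack code expects node hashes and filenames as bytes, hence the b'' literals and pycompat.bytechr in getFakeHash(). The bytechr stand-in here is an assumption that mirrors what mercurial.pycompat.bytechr provides.

import random
import sys

# Stand-in for pycompat.bytechr (assumption for illustration): chr on
# Python 2, a single-byte bytes constructor on Python 3.
if sys.version_info[0] >= 3:
    bytechr = lambda n: bytes([n])
else:
    bytechr = chr

# A fake 20-byte node hash, built the way getFakeHash() does after this change.
fakehash = b''.join(bytechr(random.randint(0, 255)) for _ in range(20))
assert isinstance(fakehash, bytes) and len(fakehash) == 20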
@@ -1,274 +1,276 @@
1 1 #!/usr/bin/env python
2 2 from __future__ import absolute_import
3 3
4 4 import hashlib
5 5 import os
6 6 import random
7 7 import shutil
8 8 import stat
9 9 import struct
10 10 import sys
11 11 import tempfile
12 12 import unittest
13 13
14 14 import silenttestrunner
15 15
16 16 from mercurial.node import nullid
17 17 from mercurial import (
18 pycompat,
18 19 ui as uimod,
19 20 )
20 21 # Load the local remotefilelog, not the system one
21 22 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
22 23 from hgext.remotefilelog import (
23 24 basepack,
24 25 historypack,
25 26 )
26 27
27 28 class histpacktests(unittest.TestCase):
28 29 def setUp(self):
29 30 self.tempdirs = []
30 31
31 32 def tearDown(self):
32 33 for d in self.tempdirs:
33 34 shutil.rmtree(d)
34 35
35 36 def makeTempDir(self):
36 37 tempdir = tempfile.mkdtemp()
37 38 self.tempdirs.append(tempdir)
38 return tempdir
39 return pycompat.fsencode(tempdir)
39 40
40 41 def getHash(self, content):
41 42 return hashlib.sha1(content).digest()
42 43
43 44 def getFakeHash(self):
44 return ''.join(chr(random.randint(0, 255)) for _ in range(20))
45 return b''.join(pycompat.bytechr(random.randint(0, 255))
46 for _ in range(20))
45 47
46 48 def createPack(self, revisions=None):
47 49 """Creates and returns a historypack containing the specified revisions.
48 50
49 51 `revisions` is a list of tuples, where each tuple contains a filename,
50 52 node, p1node, p2node, and linknode.
51 53 """
52 54 if revisions is None:
53 55 revisions = [("filename", self.getFakeHash(), nullid, nullid,
54 56 self.getFakeHash(), None)]
55 57
56 packdir = self.makeTempDir()
58 packdir = pycompat.fsencode(self.makeTempDir())
57 59 packer = historypack.mutablehistorypack(uimod.ui(), packdir,
58 60 version=2)
59 61
60 62 for filename, node, p1, p2, linknode, copyfrom in revisions:
61 63 packer.add(filename, node, p1, p2, linknode, copyfrom)
62 64
63 65 path = packer.close()
64 66 return historypack.historypack(path)
65 67
66 68 def testAddSingle(self):
67 69 """Test putting a single entry into a pack and reading it out.
68 70 """
69 71 filename = "foo"
70 72 node = self.getFakeHash()
71 73 p1 = self.getFakeHash()
72 74 p2 = self.getFakeHash()
73 75 linknode = self.getFakeHash()
74 76
75 77 revisions = [(filename, node, p1, p2, linknode, None)]
76 78 pack = self.createPack(revisions)
77 79
78 80 actual = pack.getancestors(filename, node)[node]
79 81 self.assertEquals(p1, actual[0])
80 82 self.assertEquals(p2, actual[1])
81 83 self.assertEquals(linknode, actual[2])
82 84
83 85 def testAddMultiple(self):
84 86 """Test putting multiple unrelated revisions into a pack and reading
85 87 them out.
86 88 """
87 89 revisions = []
88 90 for i in range(10):
89 91 filename = "foo-%s" % i
90 92 node = self.getFakeHash()
91 93 p1 = self.getFakeHash()
92 94 p2 = self.getFakeHash()
93 95 linknode = self.getFakeHash()
94 96 revisions.append((filename, node, p1, p2, linknode, None))
95 97
96 98 pack = self.createPack(revisions)
97 99
98 100 for filename, node, p1, p2, linknode, copyfrom in revisions:
99 101 actual = pack.getancestors(filename, node)[node]
100 102 self.assertEquals(p1, actual[0])
101 103 self.assertEquals(p2, actual[1])
102 104 self.assertEquals(linknode, actual[2])
103 105 self.assertEquals(copyfrom, actual[3])
104 106
105 107 def testAddAncestorChain(self):
106 108 """Test putting multiple revisions in into a pack and read the ancestor
107 109 chain.
108 110 """
109 111 revisions = []
110 filename = "foo"
112 filename = b"foo"
111 113 lastnode = nullid
112 114 for i in range(10):
113 115 node = self.getFakeHash()
114 116 revisions.append((filename, node, lastnode, nullid, nullid, None))
115 117 lastnode = node
116 118
117 119 # revisions must be added in topological order, newest first
118 120 revisions = list(reversed(revisions))
119 121 pack = self.createPack(revisions)
120 122
121 123 # Test that the chain has all the entries
122 124 ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
123 125 for filename, node, p1, p2, linknode, copyfrom in revisions:
124 126 ap1, ap2, alinknode, acopyfrom = ancestors[node]
125 127 self.assertEquals(ap1, p1)
126 128 self.assertEquals(ap2, p2)
127 129 self.assertEquals(alinknode, linknode)
128 130 self.assertEquals(acopyfrom, copyfrom)
129 131
130 132 def testPackMany(self):
131 133 """Pack many related and unrelated ancestors.
132 134 """
133 135 # Build a random pack file
134 136 allentries = {}
135 137 ancestorcounts = {}
136 138 revisions = []
137 139 random.seed(0)
138 140 for i in range(100):
139 filename = "filename-%s" % i
141 filename = b"filename-%d" % i
140 142 entries = []
141 143 p2 = nullid
142 144 linknode = nullid
143 145 for j in range(random.randint(1, 100)):
144 146 node = self.getFakeHash()
145 147 p1 = nullid
146 148 if len(entries) > 0:
147 149 p1 = entries[random.randint(0, len(entries) - 1)]
148 150 entries.append(node)
149 151 revisions.append((filename, node, p1, p2, linknode, None))
150 152 allentries[(filename, node)] = (p1, p2, linknode)
151 153 if p1 == nullid:
152 154 ancestorcounts[(filename, node)] = 1
153 155 else:
154 156 newcount = ancestorcounts[(filename, p1)] + 1
155 157 ancestorcounts[(filename, node)] = newcount
156 158
157 159 # Must add file entries in reverse topological order
158 160 revisions = list(reversed(revisions))
159 161 pack = self.createPack(revisions)
160 162
161 163 # Verify the pack contents
162 164 for (filename, node), (p1, p2, lastnode) in allentries.iteritems():
163 165 ancestors = pack.getancestors(filename, node)
164 166 self.assertEquals(ancestorcounts[(filename, node)],
165 167 len(ancestors))
166 168 for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.iteritems():
167 169 ep1, ep2, elinknode = allentries[(filename, anode)]
168 170 self.assertEquals(ap1, ep1)
169 171 self.assertEquals(ap2, ep2)
170 172 self.assertEquals(alinknode, elinknode)
171 173 self.assertEquals(copyfrom, None)
172 174
173 175 def testGetNodeInfo(self):
174 176 revisions = []
175 filename = "foo"
177 filename = b"foo"
176 178 lastnode = nullid
177 179 for i in range(10):
178 180 node = self.getFakeHash()
179 181 revisions.append((filename, node, lastnode, nullid, nullid, None))
180 182 lastnode = node
181 183
182 184 pack = self.createPack(revisions)
183 185
184 186 # Test that getnodeinfo returns the expected results
185 187 for filename, node, p1, p2, linknode, copyfrom in revisions:
186 188 ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
187 189 self.assertEquals(ap1, p1)
188 190 self.assertEquals(ap2, p2)
189 191 self.assertEquals(alinknode, linknode)
190 192 self.assertEquals(acopyfrom, copyfrom)
191 193
192 194 def testGetMissing(self):
193 195 """Test the getmissing() api.
194 196 """
195 197 revisions = []
196 filename = "foo"
198 filename = b"foo"
197 199 for i in range(10):
198 200 node = self.getFakeHash()
199 201 p1 = self.getFakeHash()
200 202 p2 = self.getFakeHash()
201 203 linknode = self.getFakeHash()
202 204 revisions.append((filename, node, p1, p2, linknode, None))
203 205
204 206 pack = self.createPack(revisions)
205 207
206 208 missing = pack.getmissing([(filename, revisions[0][1])])
207 209 self.assertFalse(missing)
208 210
209 211 missing = pack.getmissing([(filename, revisions[0][1]),
210 212 (filename, revisions[1][1])])
211 213 self.assertFalse(missing)
212 214
213 215 fakenode = self.getFakeHash()
214 216 missing = pack.getmissing([(filename, revisions[0][1]),
215 217 (filename, fakenode)])
216 218 self.assertEquals(missing, [(filename, fakenode)])
217 219
218 220 # Test getmissing on a non-existent filename
219 221 missing = pack.getmissing([("bar", fakenode)])
220 222 self.assertEquals(missing, [("bar", fakenode)])
221 223
222 224 def testAddThrows(self):
223 225 pack = self.createPack()
224 226
225 227 try:
226 pack.add('filename', nullid, nullid, nullid, nullid, None)
228 pack.add(b'filename', nullid, nullid, nullid, nullid, None)
227 229 self.assertTrue(False, "historypack.add should throw")
228 230 except RuntimeError:
229 231 pass
230 232
231 233 def testBadVersionThrows(self):
232 234 pack = self.createPack()
233 235 path = pack.path + '.histpack'
234 236 with open(path) as f:
235 237 raw = f.read()
236 238 raw = struct.pack('!B', 255) + raw[1:]
237 239 os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
238 240 with open(path, 'w+') as f:
239 241 f.write(raw)
240 242
241 243 try:
242 244 pack = historypack.historypack(pack.path)
243 245 self.assertTrue(False, "bad version number should have thrown")
244 246 except RuntimeError:
245 247 pass
246 248
247 249 def testLargePack(self):
248 250 """Test creating and reading from a large pack with over X entries.
249 251 This causes it to use a 2^16 fanout table instead."""
250 252 total = basepack.SMALLFANOUTCUTOFF + 1
251 253 revisions = []
252 254 for i in xrange(total):
253 filename = "foo-%s" % i
255 filename = b"foo-%d" % i
254 256 node = self.getFakeHash()
255 257 p1 = self.getFakeHash()
256 258 p2 = self.getFakeHash()
257 259 linknode = self.getFakeHash()
258 260 revisions.append((filename, node, p1, p2, linknode, None))
259 261
260 262 pack = self.createPack(revisions)
261 263 self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
262 264
263 265 for filename, node, p1, p2, linknode, copyfrom in revisions:
264 266 actual = pack.getancestors(filename, node)[node]
265 267 self.assertEquals(p1, actual[0])
266 268 self.assertEquals(p2, actual[1])
267 269 self.assertEquals(linknode, actual[2])
268 270 self.assertEquals(copyfrom, actual[3])
269 271 # TODO:
270 272 # histpack store:
271 273 # - repack two packs into one
272 274
273 275 if __name__ == '__main__':
274 276 silenttestrunner.main(__name__)
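A similar standalone sketch (also not part of the patch) covers the path handling in makeTempDir() and createPack(): tempfile.mkdtemp() returns a unicode str on Python 3, and pycompat.fsencode, for which os.fsencode is used as a stand-in below, turns it into the bytes path the pack code expects. Re-encoding an already-bytes path is a no-op, so applying the conversion in both methods is harmless.

import os
import shutil
import sys
import tempfile

tempdir = tempfile.mkdtemp()                      # unicode str on Python 3
if sys.version_info[0] >= 3:
    packdir = os.fsencode(tempdir)                # stand-in for pycompat.fsencode
    assert os.fsencode(packdir) == packdir        # re-encoding bytes is a no-op
else:
    packdir = tempdir                             # paths are already bytes on Python 2
assert isinstance(packdir, bytes)
shutil.rmtree(tempdir)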