##// END OF EJS Templates
ancestor: unroll loop of parents in _lazyancestorsiter()...
Yuya Nishihara -
r39571:b9ee9c2e default
parent child Browse files
Show More
@@ -1,368 +1,374 b''
1 1 # ancestor.py - generic DAG ancestor algorithm for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import heapq
11 11
12 12 from .node import nullrev
13 13 from . import (
14 14 pycompat,
15 15 )
16 16
def commonancestorsheads(pfunc, *nodes):
    """Returns a set with the heads of all common ancestors of all nodes,
    heads(::nodes[0] and ::nodes[1] and ...) .

    pfunc must return a list of parent vertices for a given vertex.
    """
    if not isinstance(nodes, set):
        nodes = set(nodes)
    if nullrev in nodes:
        return set()
    if len(nodes) <= 1:
        # zero or one node: trivially its own set of common-ancestor heads
        return nodes

    # each input node gets its own bit; seen[v] is the bitmask of input
    # nodes that v is known to be an ancestor of
    allseen = (1 << len(nodes)) - 1
    seen = [0] * (max(nodes) + 1)
    for i, n in enumerate(nodes):
        seen[n] = 1 << i
        poison = 1 << (i + 1)

    gca = set()
    interesting = len(nodes)
    nv = len(seen) - 1
    # walk revisions from highest to lowest; revision numbers are assumed
    # topologically sorted (a parent is numbered below its children)
    while nv >= 0 and interesting:
        v = nv
        nv -= 1
        if not seen[v]:
            continue
        sv = seen[v]
        if sv < poison:
            interesting -= 1
            if sv == allseen:
                # ancestor of every input node: a common-ancestor head
                gca.add(v)
                sv |= poison
                if v in nodes:
                    # history is linear
                    return {v}
        if sv < poison:
            for p in pfunc(v):
                if p == nullrev:
                    continue
                # read seen[p] only for real parents; the original read it
                # before the nullrev check, needlessly indexing seen[-1]
                sp = seen[p]
                if sp == 0:
                    seen[p] = sv
                    interesting += 1
                elif sp != sv:
                    seen[p] |= sv
        else:
            # v is poisoned (below an already-found gca); propagate the
            # poison so no ancestor of a gca is reported as a head
            for p in pfunc(v):
                if p == nullrev:
                    continue
                sp = seen[p]
                if sp and sp < poison:
                    interesting -= 1
                seen[p] = sv
    return gca
72 72
def ancestors(pfunc, *orignodes):
    """
    Returns the common ancestors of a and b that are furthest from a
    root (as measured by longest path).

    pfunc must return a list of parent vertices for a given vertex.
    """
    def deepest(nodes):
        # Keep, for every vertex, the bitmask of candidate gcas it reaches
        # ('seen') and its longest-path depth.  'interesting' counts how many
        # live vertices carry each distinct mask; the scan can stop once a
        # single mask survives.
        interesting = {}
        count = max(nodes) + 1
        depth = [0] * count
        seen = [0] * count
        mapping = []
        for (i, n) in enumerate(sorted(nodes)):
            depth[n] = 1
            b = 1 << i
            seen[n] = b
            interesting[b] = 1
            mapping.append((b, n))
        nv = count - 1
        while nv >= 0 and len(interesting) > 1:
            v = nv
            nv -= 1
            dv = depth[v]
            if dv == 0:
                # v is not reachable from any candidate: skip
                continue
            sv = seen[v]
            for p in pfunc(v):
                if p == nullrev:
                    continue
                dp = depth[p]
                nsp = sp = seen[p]
                if dp <= dv:
                    # found a path to p at least as long as the current one:
                    # p inherits v's depth + mask
                    depth[p] = dv + 1
                    if sp != sv:
                        interesting[sv] += 1
                        nsp = seen[p] = sv
                        if sp:
                            interesting[sp] -= 1
                            if interesting[sp] == 0:
                                del interesting[sp]
                elif dv == dp - 1:
                    # equally deep alternative path: merge the two masks
                    nsp = sp | sv
                    if nsp == sp:
                        continue
                    seen[p] = nsp
                    interesting.setdefault(nsp, 0)
                    interesting[nsp] += 1
                    interesting[sp] -= 1
                    if interesting[sp] == 0:
                        del interesting[sp]
            # v itself has been consumed
            interesting[sv] -= 1
            if interesting[sv] == 0:
                del interesting[sv]

        if len(interesting) != 1:
            return []

        # the single surviving mask names the deepest candidate(s)
        k = 0
        for i in interesting:
            k |= i
        return set(n for (i, n) in mapping if k & i)

    gca = commonancestorsheads(pfunc, *orignodes)

    if len(gca) <= 1:
        # nothing to disambiguate
        return gca
    return deepest(gca)
141 141
class incrementalmissingancestors(object):
    '''persistent state used to calculate missing ancestors incrementally

    Although similar in spirit to lazyancestors below, this is a separate class
    because trying to support contains and missingancestors operations with the
    same internal data structures adds needless complexity.'''
    def __init__(self, pfunc, bases):
        # pfunc: rev -> list of parent revs; bases: the "known" side of the
        # (::revs - ::bases) computation
        self.bases = set(bases)
        if not self.bases:
            self.bases.add(nullrev)
        self.pfunc = pfunc

    def hasbases(self):
        '''whether the common set has any non-trivial bases'''
        return self.bases and self.bases != {nullrev}

    def addbases(self, newbases):
        '''grow the ancestor set by adding new bases'''
        self.bases.update(newbases)

    def removeancestorsfrom(self, revs):
        '''remove all ancestors of bases from the set revs (in place)'''
        bases = self.bases
        pfunc = self.pfunc
        revs.difference_update(bases)
        # nullrev is always an ancestor
        revs.discard(nullrev)
        if not revs:
            return
        # anything in revs > start is definitely not an ancestor of bases
        # revs <= start needs to be investigated
        start = max(bases)
        keepcount = sum(1 for r in revs if r > start)
        if len(revs) == keepcount:
            # no revs to consider
            return

        # walk downwards from the highest base, expanding bases through
        # parents; any rev encountered along the way is an ancestor of a base
        for curr in pycompat.xrange(start, min(revs) - 1, -1):
            if curr not in bases:
                continue
            revs.discard(curr)
            bases.update(pfunc(curr))
            if len(revs) == keepcount:
                # no more potential revs to discard
                break

    def missingancestors(self, revs):
        '''return all the ancestors of revs that are not ancestors of self.bases

        This may include elements from revs.

        Equivalent to the revset (::revs - ::self.bases). Revs are returned in
        revision number order, which is a topological order.'''
        revsvisit = set(revs)
        basesvisit = self.bases
        pfunc = self.pfunc
        bothvisit = revsvisit.intersection(basesvisit)
        revsvisit.difference_update(bothvisit)
        if not revsvisit:
            return []

        start = max(max(revsvisit), max(basesvisit))
        # At this point, we hold the invariants that:
        # - revsvisit is the set of nodes we know are an ancestor of at least
        #   one of the nodes in revs
        # - basesvisit is the same for bases
        # - bothvisit is the set of nodes we know are ancestors of at least one
        #   of the nodes in revs and one of the nodes in bases. bothvisit and
        #   revsvisit are mutually exclusive, but bothvisit is a subset of
        #   basesvisit.
        # Now we walk down in reverse topo order, adding parents of nodes
        # already visited to the sets while maintaining the invariants. When a
        # node is found in both revsvisit and basesvisit, it is removed from
        # revsvisit and added to bothvisit. When revsvisit becomes empty, there
        # are no more ancestors of revs that aren't also ancestors of bases, so
        # exit.

        missing = []
        for curr in pycompat.xrange(start, nullrev, -1):
            if not revsvisit:
                break

            if curr in bothvisit:
                bothvisit.remove(curr)
                # curr's parents might have made it into revsvisit through
                # another path
                for p in pfunc(curr):
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                continue

            if curr in revsvisit:
                missing.append(curr)
                revsvisit.remove(curr)
                thisvisit = revsvisit
                othervisit = basesvisit
            elif curr in basesvisit:
                thisvisit = basesvisit
                othervisit = revsvisit
            else:
                # not an ancestor of revs or bases: ignore
                continue

            for p in pfunc(curr):
                if p == nullrev:
                    pass
                elif p in othervisit or p in bothvisit:
                    # p is implicitly in thisvisit. This means p is or should be
                    # in bothvisit
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                else:
                    # visit later
                    thisvisit.add(p)

        missing.reverse()
        return missing
261 261
# Extracted from lazyancestors.__iter__ to avoid a reference cycle
def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
    """Yield ancestors of initrevs in decreasing revision-number order.

    parentrevs(rev) must return exactly two parents (p1, p2), with nullrev
    standing in for a missing parent.  Revisions below stoprev are never
    yielded; if inclusive is true, initrevs themselves are yielded first.
    """
    seen = {nullrev}
    # bind heap primitives and seen.add to locals: this generator is hot,
    # and local-name lookups beat repeated attribute lookups
    schedule = heapq.heappush
    nextitem = heapq.heappop
    see = seen.add

    if inclusive:
        # store negated revs so the min-heap pops the highest rev first
        visit = [-r for r in initrevs]
        seen.update(initrevs)
        heapq.heapify(visit)
    else:
        visit = []
        heapq.heapify(visit)
        for r in initrevs:
            # the two parents are handled unrolled rather than with
            # 'for parent in parentrevs(r)' (this commit's optimization)
            p1, p2 = parentrevs(r)
            if p1 not in seen:
                schedule(visit, -p1)
                see(p1)
            if p2 not in seen:
                schedule(visit, -p2)
                see(p2)

    while visit:
        current = -nextitem(visit)
        if current < stoprev:
            # everything still queued is smaller, so we are done
            break
        yield current
        p1, p2 = parentrevs(current)
        if p1 not in seen:
            schedule(visit, -p1)
            see(p1)
        if p2 not in seen:
            schedule(visit, -p2)
            see(p2)
291 297
class lazyancestors(object):
    def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
        """Create a new object generating ancestors for the given revs. Does
        not generate revs lower than stoprev.

        This is computed lazily starting from revs. The object supports
        iteration and membership.

        cl should be a changelog and revs should be an iterable. inclusive is
        a boolean that indicates whether revs should be included. Revs lower
        than stoprev will not be generated.

        Result does not include the null revision."""
        self._parentrevs = pfunc
        # revs below stoprev can never be yielded; drop them up front
        self._initrevs = revs = [r for r in revs if r >= stoprev]
        self._stoprev = stoprev
        self._inclusive = inclusive

        # incremental state for __contains__: revisions already produced by
        # _containsiter are cached so repeated membership tests are cheap
        self._containsseen = set()
        self._containsiter = _lazyancestorsiter(self._parentrevs,
                                                self._initrevs,
                                                self._stoprev,
                                                self._inclusive)

    def __nonzero__(self):
        """False if the set is empty, True otherwise."""
        try:
            # a fresh iterator is used, so this does not disturb __contains__
            next(iter(self))
            return True
        except StopIteration:
            return False

    __bool__ = __nonzero__  # Python 3 spelling

    def __iter__(self):
        """Generate the ancestors of _initrevs in reverse topological order.

        If inclusive is False, yield a sequence of revision numbers starting
        with the parents of each revision in revs, i.e., each revision is
        *not* considered an ancestor of itself. Results are emitted in reverse
        revision number order. That order is also topological: a child is
        always emitted before its parent.

        If inclusive is True, the source revisions are also yielded. The
        reverse revision number order is still enforced."""
        for rev in _lazyancestorsiter(self._parentrevs, self._initrevs,
                                      self._stoprev, self._inclusive):
            yield rev

    def __contains__(self, target):
        """Test whether target is an ancestor of self._initrevs."""
        seen = self._containsseen
        if target in seen:
            return True
        iter = self._containsiter
        if iter is None:
            # Iterator exhausted
            return False
        # Only integer target is valid, but some callers expect 'None in self'
        # to be False. So we explicitly allow it.
        if target is None:
            return False

        see = seen.add
        try:
            # ancestors are emitted in decreasing order, so we can stop as
            # soon as the iterator drops below target
            while True:
                rev = next(iter)
                see(rev)
                if rev == target:
                    return True
                if rev < target:
                    return False
        except StopIteration:
            # Set to None to indicate fast-path can be used next time, and to
            # free up memory.
            self._containsiter = None
            return False
@@ -1,280 +1,280 b''
1 1 from __future__ import absolute_import, print_function
2 2
3 3 import binascii
4 4 import getopt
5 5 import math
6 6 import os
7 7 import random
8 8 import sys
9 9 import time
10 10
11 11 from mercurial.node import nullrev
12 12 from mercurial import (
13 13 ancestor,
14 14 debugcommands,
15 15 hg,
16 16 pycompat,
17 17 ui as uimod,
18 18 util,
19 19 )
20 20
if pycompat.ispy3:
    # Python 3 dropped 'long' and 'xrange'; alias them so the rest of this
    # script can keep the Python 2 spellings
    long = int
    xrange = range
24 24
def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
    '''nodes: total number of nodes in the graph
    rootprob: probability that a new node (not 0) will be a root
    mergeprob: probability that, excluding a root a node will be a merge
    prevprob: probability that p1 will be the previous node

    return value is a graph represented as an adjacency list.
    '''
    graph = [None] * nodes
    for i in xrange(nodes):
        if i == 0 or rng.random() < rootprob:
            # a root has the null revision as its only parent
            graph[i] = [nullrev]
        elif i == 1:
            # node 1 can only descend from node 0
            graph[i] = [0]
        elif rng.random() < mergeprob:
            if i == 2 or rng.random() < prevprob:
                # p1 is prev
                p1 = i - 1
            else:
                p1 = rng.randrange(i - 1)
            # p2 is any earlier node other than p1
            p2 = rng.choice(list(range(0, p1)) + list(range(p1 + 1, i)))
            graph[i] = [p1, p2]
        elif rng.random() < prevprob:
            graph[i] = [i - 1]
        else:
            graph[i] = [rng.randrange(i - 1)]

    return graph
53 53
def buildancestorsets(graph):
    '''Return, for every node of the adjacency-list graph, the full set of
    its ancestors (inclusive of the node itself).

    Relies on nodes being numbered in topological order (parents before
    children), so a single ascending pass suffices.'''
    ancs = [None] * len(graph)
    for i in xrange(len(graph)):
        # every node is an ancestor of itself
        ancs[i] = {i}
        for p in graph[i]:
            # skip null parents individually instead of special-casing the
            # exact [nullrev] root shape; this also copes with two-parent
            # root entries such as [0, -1] or [-1, -1]
            if p != nullrev:
                ancs[i].update(ancs[p])
    return ancs
63 63
class naiveincrementalmissingancestors(object):
    '''Reference (brute-force) implementation of
    ancestor.incrementalmissingancestors, driven by the precomputed
    per-node ancestor sets produced by buildancestorsets().'''
    def __init__(self, ancs, bases):
        self.ancs = ancs
        self.bases = set(bases)
    def addbases(self, newbases):
        '''grow the base set'''
        self.bases.update(newbases)
    def removeancestorsfrom(self, revs):
        '''remove, in place, every rev in revs that is an ancestor of a base'''
        for base in self.bases:
            if base != nullrev:
                revs.difference_update(self.ancs[base])
        # nullrev is an ancestor of everything, hence never "missing"
        revs.discard(nullrev)
    def missingancestors(self, revs):
        '''return sorted(::revs - ::bases) using the precomputed sets'''
        res = set()
        for rev in revs:
            if rev != nullrev:
                res.update(self.ancs[rev])
        for base in self.bases:
            if base != nullrev:
                res.difference_update(self.ancs[base])
        return sorted(res)
84 84
def test_missingancestors(seed, rng):
    # empirically observed to take around 1 second
    graphcount = 100
    testcount = 10
    inccount = 10
    # one-element list so the nested err() can mutate the counter
    nerrs = [0]
    # the default mu and sigma give us a nice distribution of mostly
    # single-digit counts (including 0) with some higher ones
    def lognormrandom(mu, sigma):
        return int(math.floor(rng.lognormvariate(mu, sigma)))

    def samplerevs(nodes, mu=1.1, sigma=0.8):
        count = min(lognormrandom(mu, sigma), len(nodes))
        return rng.sample(nodes, count)

    def err(seed, graph, bases, seq, output, expected):
        # print seed/graph headers only on the first error (globally / per
        # graph); note err closes over 'gerrs', which is (re)bound in the
        # per-graph loop below before err can ever be called
        if nerrs[0] == 0:
            print('seed:', hex(seed)[:-1], file=sys.stderr)
        if gerrs[0] == 0:
            print('graph:', graph, file=sys.stderr)
        print('* bases:', bases, file=sys.stderr)
        print('* seq: ', seq, file=sys.stderr)
        print('* output: ', output, file=sys.stderr)
        print('* expected:', expected, file=sys.stderr)
        nerrs[0] += 1
        gerrs[0] += 1

    for g in xrange(graphcount):
        graph = buildgraph(rng)
        ancs = buildancestorsets(graph)
        gerrs = [0]
        for _ in xrange(testcount):
            # start from nullrev to include it as a possibility
            graphnodes = range(nullrev, len(graph))
            bases = samplerevs(graphnodes)

            # fast algorithm
            inc = ancestor.incrementalmissingancestors(graph.__getitem__, bases)
            # reference slow algorithm
            naiveinc = naiveincrementalmissingancestors(ancs, bases)
            seq = []
            revs = []
            for _ in xrange(inccount):
                if rng.random() < 0.2:
                    newbases = samplerevs(graphnodes)
                    seq.append(('addbases', newbases))
                    inc.addbases(newbases)
                    naiveinc.addbases(newbases)
                if rng.random() < 0.4:
                    # larger set so that there are more revs to remove from
                    revs = samplerevs(graphnodes, mu=1.5)
                    seq.append(('removeancestorsfrom', revs))
                    hrevs = set(revs)
                    rrevs = set(revs)
                    inc.removeancestorsfrom(hrevs)
                    naiveinc.removeancestorsfrom(rrevs)
                    if hrevs != rrevs:
                        err(seed, graph, bases, seq, sorted(hrevs),
                            sorted(rrevs))
                else:
                    revs = samplerevs(graphnodes)
                    seq.append(('missingancestors', revs))
                    h = inc.missingancestors(revs)
                    r = naiveinc.missingancestors(revs)
                    if h != r:
                        err(seed, graph, bases, seq, h, r)
151 151
# graph is a dict of child->parent adjacency lists for this graph:
# o 13
# |
# | o 12
# | |
# | | o 11
# | | |\
# | | | | o 10
# | | | | |
# | o---+ | 9
# | | | | |
# o | | | | 8
# / / / /
# | | o | 7
# | | | |
# o---+ | 6
# / / /
# | | o 5
# | |/
# | o 4
# | |
# o | 3
# | |
# | o 2
# |/
# o 1
# |
# o 0
# Every entry lists exactly two parents (p1, p2), padding with -1, to match
# the two-parent contract of _lazyancestorsiter().
graph = {0: [-1, -1], 1: [0, -1], 2: [1, -1], 3: [1, -1], 4: [2, -1],
         5: [4, -1], 6: [4, -1], 7: [4, -1], 8: [-1, -1], 9: [6, 7],
         10: [5, -1], 11: [3, 7], 12: [9, -1], 13: [8, -1]}
184 184
def genlazyancestors(revs, stoprev=0, inclusive=False):
    # announce the parameters so the test's expected output documents the case
    print(("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" %
           (revs, stoprev, inclusive)))
    return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev,
                                  inclusive=inclusive)
190 190
def printlazyancestors(s, l):
    """Print which members of the candidate list l are in ancestor set s,
    then the full iteration order of s.

    Membership is tested before iterating on purpose: for a lazyancestors
    object the two operations use distinct iterators (__contains__ keeps its
    own incremental one), so this exercises both paths."""
    print('membership: %r' % [n for n in l if n in s])
    print('iteration: %r' % list(s))
194 194
def test_lazyancestors():
    # Empty revs
    s = genlazyancestors([])
    printlazyancestors(s, [3, 0, -1])

    # Standard example
    s = genlazyancestors([11, 13])
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])

    # Standard with ancestry in the initial set (1 is ancestor of 3)
    s = genlazyancestors([1, 3])
    printlazyancestors(s, [1, -1, 0])

    # Including revs
    s = genlazyancestors([11, 13], inclusive=True)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])

    # Test with stoprev
    s = genlazyancestors([11, 13], stoprev=6)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])
    s = genlazyancestors([11, 13], stoprev=6, inclusive=True)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])

    # Test with stoprev >= min(initrevs)
    s = genlazyancestors([11, 13], stoprev=11, inclusive=True)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])
    s = genlazyancestors([11, 13], stoprev=12, inclusive=True)
    printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0])
223 223
# The C gca algorithm requires a real repo. These are textual descriptions of
# DAGs that have been known to be problematic, and, optionally, known pairs
# of revisions and their expected ancestor list.
dagtests = [
    (b'+2*2*2/*3/2', {}),
    (b'+3*3/*2*2/*4*4/*4/2*4/2*2', {}),
    (b'+2*2*/2*4*/4*/3*2/4', {(6, 7): [3, 5]}),
]
def test_gca():
    u = uimod.ui.load()
    for i, (dag, tests) in enumerate(dagtests):
        # build one throwaway repo per DAG description
        repo = hg.repository(u, b'gca%d' % i, create=1)
        cl = repo.changelog
        if not util.safehasattr(cl.index, 'ancestors'):
            # C version not available
            return

        debugcommands.debugbuilddag(u, repo, dag)
        # Compare the results of the Python and C versions. This does not
        # include choosing a winner when more than one gca exists -- we make
        # sure both return exactly the same set of gcas.
        # Also compare against expected results, if available.
        for a in cl:
            for b in cl:
                cgcas = sorted(cl.index.ancestors(a, b))
                pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b))
                expected = None
                if (a, b) in tests:
                    expected = tests[(a, b)]
                if cgcas != pygcas or (expected and cgcas != expected):
                    print("test_gca: for dag %s, gcas for %d, %d:"
                          % (dag, a, b))
                    print(" C returned: %s" % cgcas)
                    print(" Python returned: %s" % pygcas)
                    if expected:
                        print(" expected: %s" % expected)
260 260
def main():
    # optional -s/--seed makes a failing run reproducible
    seed = None
    opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed='])
    for o, a in opts:
        if o in ('-s', '--seed'):
            seed = long(a, base=0) # accepts base 10 or 16 strings

    if seed is None:
        try:
            seed = long(binascii.hexlify(os.urandom(16)), 16)
        except AttributeError:
            # os.urandom unavailable: fall back to the wall clock
            seed = long(time.time() * 1000)

    rng = random.Random(seed)
    test_missingancestors(seed, rng)
    test_lazyancestors()
    test_gca()

if __name__ == '__main__':
    main()
General Comments 0
You need to be logged in to leave comments. Login now