@@ -28,35 +28,87 @@ def hash(text, p1, p2):
 nullid = "\0" * 20
 indexformat = ">4l20s20s20s"
 
+class lazyparser:
+    def __init__(self, data):
+        self.data = data
+        self.s = struct.calcsize(indexformat)
+        self.l = len(data)/self.s
+        self.index = [None] * self.l
+        self.map = {nullid: -1}
+
+        if 0:
+            n = 0
+            i = self.data
+            s = struct.calcsize(indexformat)
+            for f in xrange(0, len(i), s):
+                # offset, size, base, linkrev, p1, p2, nodeid
+                e = struct.unpack(indexformat, i[f:f + s])
+                self.map[e[6]] = n
+                self.index.append(e)
+                n += 1
+
+    def load(self, pos):
+        block = pos / 1000
+        i = block * 1000
+        end = min(self.l, i + 1000)
+        while i < end:
+            d = self.data[i * self.s: (i + 1) * self.s]
+            e = struct.unpack(indexformat, d)
+            self.index[i] = e
+            self.map[e[6]] = i
+            i += 1
+
+class lazyindex:
+    def __init__(self, parser):
+        self.p = parser
+    def __len__(self):
+        return len(self.p.index)
+    def __getitem__(self, pos):
+        i = self.p.index[pos]
+        if not i:
+            self.p.load(pos)
+            return self.p.index[pos]
+        return i
+    def append(self, e):
+        self.p.index.append(e)
+
+class lazymap:
+    def __init__(self, parser):
+        self.p = parser
+    def load(self, key):
+        n = self.p.data.find(key)
+        if n < 0: raise KeyError(key)
+        pos = n / self.p.s
+        self.p.load(pos)
+    def __contains__(self, key):
+        try:
+            self.p.map[key]
+            return True
+        except KeyError:
+            return False
+    def __getitem__(self, key):
+        try:
+            return self.p.map[key]
+        except KeyError:
+            self.load(key)
+            return self.p.map[key]
+    def __setitem__(self, key, val):
+        self.p.map[key] = val
+
 class revlog:
     def __init__(self, opener, indexfile, datafile):
         self.indexfile = indexfile
         self.datafile = datafile
-        self.index = []
         self.opener = opener
         self.cache = None
         # read the whole index for now, handle on-demand later
         try:
-            n = 0
             i = self.opener(self.indexfile).read()
-            s = struct.calcsize(indexformat)
-
-            # preallocate arrays
-            l = len(i)/s
-            self.index = [None] * l
-            m = [None] * l
-
-            for f in xrange(0, len(i), s):
-                # offset, size, base, linkrev, p1, p2, nodeid
-                e = struct.unpack(indexformat, i[f:f + s])
-                self.index[n] = e
-                m[n] = (e[6], n)
-                n += 1
-
-            self.nodemap = dict(m)
         except IOError:
-            self.nodemap = {}
-        self.nodemap[nullid] = -1
+            i = ""
+        parser = lazyparser(i)
+        self.index = lazyindex(parser)
+        self.nodemap = lazymap(parser)
 
     def tip(self): return self.node(len(self.index) - 1)
     def count(self): return len(self.index)
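The classes added in this hunk avoid unpacking the whole revlog index up front: lazyparser keeps the raw index bytes and fills in its index list and node map in blocks of 1000 records around whatever entry is first touched, while lazyindex gives list-like access by revision number and lazymap gives dict-like access by node id, each triggering a block load on a miss. A minimal self-contained sketch of the same idea, written in Python 3 rather than the original Python 2 and for illustration only, might look like the following; the capitalized class names, the BLOCK constant, and the two fabricated records in the usage part are assumptions of this sketch, not code from the diff.

import struct

INDEXFORMAT = ">4l20s20s20s"  # offset, size, base, linkrev, p1, p2, nodeid
RECSIZE = struct.calcsize(INDEXFORMAT)
NULLID = b"\0" * 20
BLOCK = 1000  # records unpacked per load, mirroring the 1000 in the diff


class LazyParser:
    """Holds raw index bytes; unpacks records only when asked to."""
    def __init__(self, data):
        self.data = data
        self.count = len(data) // RECSIZE
        self.index = [None] * self.count   # rev -> record tuple (filled lazily)
        self.map = {NULLID: -1}            # nodeid -> rev (filled lazily)

    def load(self, pos):
        # Unpack the whole block of records containing position `pos`.
        start = (pos // BLOCK) * BLOCK
        end = min(self.count, start + BLOCK)
        for i in range(start, end):
            e = struct.unpack(INDEXFORMAT,
                              self.data[i * RECSIZE:(i + 1) * RECSIZE])
            self.index[i] = e
            self.map[e[6]] = i


class LazyIndex:
    """List-like view: index[rev] -> record tuple, parsed on first access."""
    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return self.p.count

    def __getitem__(self, pos):
        if self.p.index[pos] is None:
            self.p.load(pos)
        return self.p.index[pos]


class LazyMap:
    """Dict-like view: nodemap[nodeid] -> rev, located by scanning raw bytes."""
    def __init__(self, parser):
        self.p = parser

    def __getitem__(self, key):
        if key not in self.p.map:
            n = self.p.data.find(key)      # find the node in the raw index
            if n < 0:
                raise KeyError(key)
            self.p.load(n // RECSIZE)      # parse the block that contains it
        return self.p.map[key]


# Illustrative usage with two made-up records.
node1, node2 = b"\x01" * 20, b"\x02" * 20
data = (struct.pack(INDEXFORMAT, 0, 10, 0, 0, NULLID, NULLID, node1) +
        struct.pack(INDEXFORMAT, 10, 12, 0, 1, node1, NULLID, node2))
parser = LazyParser(data)
index, nodemap = LazyIndex(parser), LazyMap(parser)
print(len(index))       # 2, without unpacking any record yet
print(index[1][3])      # linkrev of revision 1 -> 1
print(nodemap[node2])   # revision number of node2 -> 1

The sketch keeps the design choice visible in the diff: a find() over the raw bytes locates a node id without parsing, and a single block-sized load services nearby lookups, so opening a large revlog stays cheap until entries are actually needed.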