##// END OF EJS Templates
Handle nullid better for ancestor
mpm@selenic.com -
r2:ecf3fd94 default
parent child Browse files
Show More
@@ -1,199 +1,201
1 1 # revlog.py - storage back-end for mercurial
2 2 #
3 3 # This provides efficient delta storage with O(1) retrieve and append
4 4 # and O(changes) merge between branches
5 5 #
6 6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 7 #
8 8 # This software may be used and distributed according to the terms
9 9 # of the GNU General Public License, incorporated herein by reference.
10 10
11 import zlib, struct, sha, binascii, os, tempfile
11 import zlib, struct, sha, os, tempfile
12 12 from mercurial import mdiff
13 13
def compress(text):
    """Deflate the given text with zlib and return the compressed bytes."""
    return zlib.compress(text)
16 16
def decompress(bin):
    """Inflate zlib-compressed bytes and return the original text."""
    return zlib.decompress(bin)
19 19
def hash(text, p1, p2):
    """Return the sha1 digest of the two sorted parent ids followed by text.

    Sorting the parents first makes the node id independent of parent
    order.  Produces byte-identical digests to the old sha.sha() call.
    NOTE: shadows the builtin hash(); kept for caller compatibility.
    """
    # the legacy `sha` module was deprecated and removed; sha1 lives in hashlib
    import hashlib
    parents = [p1, p2]
    parents.sort()
    return hashlib.sha1(parents[0] + parents[1] + text).digest()
24 24
# id of the "null" revision: 20 zero bytes; acts as the parent of the
# first real revision and maps to revision number -1
nullid = "\0" * 20
# on-disk index entry layout: offset, length, delta-base rev, linkrev
# (four longs), then p1, p2 and node (three 20-byte ids)
indexformat = ">4l20s20s20s"
27 27
class revlog:
    # A revlog stores the revisions of one file as a chain of compressed
    # deltas with occasional full snapshots (see addrevision/revision).
    def __init__(self, opener, indexfile, datafile):
        """Load the revision index for indexfile/datafile via opener.

        opener is a callable(path, mode="r") returning a file object —
        TODO confirm exact signature against callers.  A missing index
        file (IOError) is treated as an empty revlog.
        """
        self.indexfile = indexfile
        self.datafile = datafile
        self.index = []      # one tuple per revision, indexformat fields
        self.opener = opener
        self.cache = None    # (node, rev, text) of last reconstructed revision
        # node id <-> revision number map; the null id is revision -1
        self.nodemap = { -1: nullid, nullid: -1 }
        # read the whole index for now, handle on-demand later
        try:
            n = 0
            i = self.opener(self.indexfile).read()
            s = struct.calcsize(indexformat)
            for f in range(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid, changeset
                e = struct.unpack(indexformat, i[f:f + s])
                self.nodemap[e[6]] = n
                self.index.append(e)
                n += 1
        except IOError: pass
48 48
49 49 def tip(self): return self.node(len(self.index) - 1)
50 50 def count(self): return len(self.index)
51 51 def node(self, rev): return rev < 0 and nullid or self.index[rev][6]
52 52 def rev(self, node): return self.nodemap[node]
53 53 def linkrev(self, node): return self.index[self.nodemap[node]][3]
54 def parents(self, node): return self.index[self.nodemap[node]][4:6]
54 def parents(self, node):
55 if node == nullid: return (nullid, nullid)
56 return self.index[self.nodemap[node]][4:6]
55 57
56 58 def start(self, rev): return self.index[rev][0]
57 59 def length(self, rev): return self.index[rev][1]
58 60 def end(self, rev): return self.start(rev) + self.length(rev)
59 61 def base(self, rev): return self.index[rev][2]
60 62
61 63 def revisions(self, list):
62 64 # this can be optimized to do spans, etc
63 65 # be stupid for now
64 66 for r in list:
65 67 yield self.revision(r)
66 68
67 69 def diff(self, a, b):
68 70 return mdiff.textdiff(a, b)
69 71
70 72 def patch(self, text, patch):
71 73 return mdiff.patch(text, patch)
72 74
73 75 def revision(self, node):
74 76 if node is nullid: return ""
75 77 if self.cache and self.cache[0] == node: return self.cache[2]
76 78
77 79 text = None
78 80 rev = self.rev(node)
79 81 base = self.base(rev)
80 82 start = self.start(base)
81 83 end = self.end(rev)
82 84
83 85 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
84 86 base = self.cache[1]
85 87 start = self.start(base + 1)
86 88 text = self.cache[2]
87 89 last = 0
88 90
89 91 f = self.opener(self.datafile)
90 92 f.seek(start)
91 93 data = f.read(end - start)
92 94
93 95 if not text:
94 96 last = self.length(base)
95 97 text = decompress(data[:last])
96 98
97 99 for r in range(base + 1, rev + 1):
98 100 s = self.length(r)
99 101 b = decompress(data[last:last + s])
100 102 text = self.patch(text, b)
101 103 last = last + s
102 104
103 105 (p1, p2) = self.parents(node)
104 106 if self.node(rev) != hash(text, p1, p2):
105 107 raise "Consistency check failed on %s:%d" % (self.datafile, rev)
106 108
107 109 self.cache = (node, rev, text)
108 110 return text
109 111
110 112 def addrevision(self, text, transaction, link, p1=None, p2=None):
111 113 if text is None: text = ""
112 114 if p1 is None: p1 = self.tip()
113 115 if p2 is None: p2 = nullid
114 116
115 117 node = hash(text, p1, p2)
116 118
117 119 n = self.count()
118 120 t = n - 1
119 121
120 122 if n:
121 123 start = self.start(self.base(t))
122 124 end = self.end(t)
123 125 prev = self.revision(self.tip())
124 126 if 0:
125 127 dd = self.diff(prev, text)
126 128 tt = self.patch(prev, dd)
127 129 if tt != text:
128 130 print prev
129 131 print text
130 132 print tt
131 133 raise "diff+patch failed"
132 134 data = compress(self.diff(prev, text))
133 135
134 136 # full versions are inserted when the needed deltas
135 137 # become comparable to the uncompressed text
136 138 if not n or (end + len(data) - start) > len(text) * 2:
137 139 data = compress(text)
138 140 base = n
139 141 else:
140 142 base = self.base(t)
141 143
142 144 offset = 0
143 145 if t >= 0:
144 146 offset = self.end(t)
145 147
146 148 e = (offset, len(data), base, link, p1, p2, node)
147 149
148 150 self.index.append(e)
149 151 self.nodemap[node] = n
150 152 entry = struct.pack(indexformat, *e)
151 153
152 154 transaction.add(self.datafile, e[0])
153 155 self.opener(self.datafile, "a").write(data)
154 156 transaction.add(self.indexfile, n * len(entry))
155 157 self.opener(self.indexfile, "a").write(entry)
156 158
157 159 self.cache = (node, n, text)
158 160 return node
159 161
160 162 def ancestor(self, a, b):
161 163 def expand(e1, e2, a1, a2):
162 164 ne = []
163 165 for n in e1:
164 166 (p1, p2) = self.parents(n)
165 167 if p1 in a2: return p1
166 168 if p2 in a2: return p2
167 169 if p1 != nullid and p1 not in a1:
168 170 a1[p1] = 1
169 171 ne.append(p1)
170 172 if p2 != nullid and p2 not in a1:
171 173 a1[p2] = 1
172 174 ne.append(p2)
173 175 return expand(e2, ne, a2, a1)
174 176 return expand([a], [b], {a:1}, {b:1})
175 177
    def mergedag(self, other, transaction, linkseq, accumulate = None):
        """Combine the nodes from other's DAG into ours.

        linkseq is an iterator yielding the link revision for each new
        node; accumulate, if given, is called with each new text.
        Returns (old tip, new tip) — the unmerged heads to resolve.
        """
        old = self.tip()
        i = self.count()
        l = []   # (node, p1, p2) of nodes we don't have yet, in other's order

        # merge the other revision log into our DAG
        for r in range(other.count()):
            id = other.node(r)
            if id not in self.nodemap:
                (xn, yn) = other.parents(id)
                l.append((id, xn, yn))
                self.nodemap[id] = i
                i += 1

        # merge node data for new nodes
        # NOTE: linkseq and r are consumed with py2-style .next() calls
        r = other.revisions([e[0] for e in l])
        for e in l:
            t = r.next()
            if accumulate: accumulate(t)
            self.addrevision(t, transaction, linkseq.next(), e[1], e[2])

        # return the unmerged heads for later resolving
        return (old, self.tip())
General Comments 0
You need to be logged in to leave comments. Login now