revlog: pre-compute null revlog item for pure version...
Joerg Sonnenberger
r46222:7baf5f79 default
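
The change is small: the tuple that `__getitem__` returns for revision -1 is now built once, at module import time, as `nullitem`, instead of being re-created on every lookup. Below is a minimal sketch of the same pattern, assuming stand-in names (`NULL_ID`, `NULL_ITEM`, `ToyIndex`) rather than Mercurial's own classes:

# Sketch of the optimization: build the "null revision" entry once at import
# time instead of allocating a new tuple on every lookup of revision -1.
# NULL_ID, NULL_ITEM and ToyIndex are illustrative names, not Mercurial's.
NULL_ID = b"\x00" * 20                           # stand-in for node.nullid
NULL_ITEM = (0, 0, 0, -1, -1, -1, -1, NULL_ID)   # computed once, reused forever


class ToyIndex(object):
    def __init__(self, entries):
        self._entries = list(entries)

    def __getitem__(self, i):
        if i == -1:
            # before the change: return (0, 0, 0, -1, -1, -1, -1, NULL_ID)
            # after the change: return the precomputed constant
            return NULL_ITEM
        return self._entries[i]

The returned value is unchanged; only the per-call tuple allocation in a hot path goes away.
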
@@ -1,286 +1,287 @@
1 1 # parsers.py - Python implementation of parsers.c
2 2 #
3 3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import struct
11 11 import zlib
12 12
13 13 from ..node import nullid, nullrev
14 14 from .. import (
15 15 pycompat,
16 16 util,
17 17 )
18 18
19 19 from ..revlogutils import nodemap as nodemaputil
20 20
21 21 stringio = pycompat.bytesio
22 22
23 23
24 24 _pack = struct.pack
25 25 _unpack = struct.unpack
26 26 _compress = zlib.compress
27 27 _decompress = zlib.decompress
28 28
29 29 # Some code below makes tuples directly because it's more convenient. However,
30 30 # code outside this module should always use dirstatetuple.
31 31 def dirstatetuple(*x):
32 32 # x is a tuple
33 33 return x
34 34
35 35
36 36 indexformatng = b">Qiiiiii20s12x"
37 37 indexfirst = struct.calcsize(b'Q')
38 38 sizeint = struct.calcsize(b'i')
39 39 indexsize = struct.calcsize(indexformatng)
40 nullitem = (0, 0, 0, -1, -1, -1, -1, nullid)
40 41
41 42
42 43 def gettype(q):
43 44 return int(q & 0xFFFF)
44 45
45 46
46 47 def offset_type(offset, type):
47 48 return int(int(offset) << 16 | type)
48 49
49 50
50 51 class BaseIndexObject(object):
51 52 @property
52 53 def nodemap(self):
53 54 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
54 55 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
55 56 return self._nodemap
56 57
57 58 @util.propertycache
58 59 def _nodemap(self):
59 60 nodemap = nodemaputil.NodeMap({nullid: nullrev})
60 61 for r in range(0, len(self)):
61 62 n = self[r][7]
62 63 nodemap[n] = r
63 64 return nodemap
64 65
65 66 def has_node(self, node):
66 67 """return True if the node exists in the index"""
67 68 return node in self._nodemap
68 69
69 70 def rev(self, node):
70 71 """return a revision for a node
71 72
72 73 If the node is unknown, raise a RevlogError"""
73 74 return self._nodemap[node]
74 75
75 76 def get_rev(self, node):
76 77 """return a revision for a node
77 78
78 79 If the node is unknown, return None"""
79 80 return self._nodemap.get(node)
80 81
81 82 def _stripnodes(self, start):
82 83 if '_nodemap' in vars(self):
83 84 for r in range(start, len(self)):
84 85 n = self[r][7]
85 86 del self._nodemap[n]
86 87
87 88 def clearcaches(self):
88 89 self.__dict__.pop('_nodemap', None)
89 90
90 91 def __len__(self):
91 92 return self._lgt + len(self._extra)
92 93
93 94 def append(self, tup):
94 95 if '_nodemap' in vars(self):
95 96 self._nodemap[tup[7]] = len(self)
96 97 self._extra.append(tup)
97 98
98 99 def _check_index(self, i):
99 100 if not isinstance(i, int):
100 101 raise TypeError(b"expecting int indexes")
101 102 if i < 0 or i >= len(self):
102 103 raise IndexError
103 104
104 105 def __getitem__(self, i):
105 106 if i == -1:
106 return (0, 0, 0, -1, -1, -1, -1, nullid)
107 return nullitem
107 108 self._check_index(i)
108 109 if i >= self._lgt:
109 110 return self._extra[i - self._lgt]
110 111 index = self._calculate_index(i)
111 112 r = struct.unpack(indexformatng, self._data[index : index + indexsize])
112 113 if i == 0:
113 114 e = list(r)
114 115 type = gettype(e[0])
115 116 e[0] = offset_type(0, type)
116 117 return tuple(e)
117 118 return r
118 119
119 120
120 121 class IndexObject(BaseIndexObject):
121 122 def __init__(self, data):
122 123 assert len(data) % indexsize == 0
123 124 self._data = data
124 125 self._lgt = len(data) // indexsize
125 126 self._extra = []
126 127
127 128 def _calculate_index(self, i):
128 129 return i * indexsize
129 130
130 131 def __delitem__(self, i):
131 132 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
132 133 raise ValueError(b"deleting slices only supports a:-1 with step 1")
133 134 i = i.start
134 135 self._check_index(i)
135 136 self._stripnodes(i)
136 137 if i < self._lgt:
137 138 self._data = self._data[: i * indexsize]
138 139 self._lgt = i
139 140 self._extra = []
140 141 else:
141 142 self._extra = self._extra[: i - self._lgt]
142 143
143 144
144 145 class PersistentNodeMapIndexObject(IndexObject):
145 146 """a debug-oriented class to test persistent nodemap
146 147
147 148 We need a simple python object to test API and higher level behavior. See
148 149 the Rust implementation for more serious usage. This should be used only
149 150 through the dedicated `devel.persistent-nodemap` config.
150 151 """
151 152
152 153 def nodemap_data_all(self):
153 154 """Return bytes containing a full serialization of a nodemap
154 155
155 156 The nodemap should be valid for the full set of revisions in the
156 157 index."""
157 158 return nodemaputil.persistent_data(self)
158 159
159 160 def nodemap_data_incremental(self):
160 161 """Return bytes containing an incremental update to persistent nodemap
161 162
162 163 This contains the data for an append-only update of the data provided
163 164 in the last call to `update_nodemap_data`.
164 165 """
165 166 if self._nm_root is None:
166 167 return None
167 168 docket = self._nm_docket
168 169 changed, data = nodemaputil.update_persistent_data(
169 170 self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
170 171 )
171 172
172 173 self._nm_root = self._nm_max_idx = self._nm_docket = None
173 174 return docket, changed, data
174 175
175 176 def update_nodemap_data(self, docket, nm_data):
176 177 """provide full block of persisted binary data for a nodemap
177 178
178 179 The data are expected to come from disk. See `nodemap_data_all` for a
179 180 producer of such data."""
180 181 if nm_data is not None:
181 182 self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
182 183 if self._nm_root:
183 184 self._nm_docket = docket
184 185 else:
185 186 self._nm_root = self._nm_max_idx = self._nm_docket = None
186 187
187 188
188 189 class InlinedIndexObject(BaseIndexObject):
189 190 def __init__(self, data, inline=0):
190 191 self._data = data
191 192 self._lgt = self._inline_scan(None)
192 193 self._inline_scan(self._lgt)
193 194 self._extra = []
194 195
195 196 def _inline_scan(self, lgt):
196 197 off = 0
197 198 if lgt is not None:
198 199 self._offsets = [0] * lgt
199 200 count = 0
200 201 while off <= len(self._data) - indexsize:
201 202 (s,) = struct.unpack(
202 203 b'>i', self._data[off + indexfirst : off + sizeint + indexfirst]
203 204 )
204 205 if lgt is not None:
205 206 self._offsets[count] = off
206 207 count += 1
207 208 off += indexsize + s
208 209 if off != len(self._data):
209 210 raise ValueError(b"corrupted data")
210 211 return count
211 212
212 213 def __delitem__(self, i):
213 214 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
214 215 raise ValueError(b"deleting slices only supports a:-1 with step 1")
215 216 i = i.start
216 217 self._check_index(i)
217 218 self._stripnodes(i)
218 219 if i < self._lgt:
219 220 self._offsets = self._offsets[:i]
220 221 self._lgt = i
221 222 self._extra = []
222 223 else:
223 224 self._extra = self._extra[: i - self._lgt]
224 225
225 226 def _calculate_index(self, i):
226 227 return self._offsets[i]
227 228
228 229
229 230 def parse_index2(data, inline):
230 231 if not inline:
231 232 return IndexObject(data), None
232 233 return InlinedIndexObject(data, inline), (0, data)
233 234
234 235
235 236 def parse_index_devel_nodemap(data, inline):
236 237 """like parse_index2, but always returns a PersistentNodeMapIndexObject
237 238 """
238 239 return PersistentNodeMapIndexObject(data), None
239 240
240 241
241 242 def parse_dirstate(dmap, copymap, st):
242 243 parents = [st[:20], st[20:40]]
243 244 # dereference fields so they will be local in loop
244 245 format = b">cllll"
245 246 e_size = struct.calcsize(format)
246 247 pos1 = 40
247 248 l = len(st)
248 249
249 250 # the inner loop
250 251 while pos1 < l:
251 252 pos2 = pos1 + e_size
252 253 e = _unpack(b">cllll", st[pos1:pos2]) # a literal here is faster
253 254 pos1 = pos2 + e[4]
254 255 f = st[pos2:pos1]
255 256 if b'\0' in f:
256 257 f, c = f.split(b'\0')
257 258 copymap[f] = c
258 259 dmap[f] = e[:4]
259 260 return parents
260 261
261 262
262 263 def pack_dirstate(dmap, copymap, pl, now):
263 264 now = int(now)
264 265 cs = stringio()
265 266 write = cs.write
266 267 write(b"".join(pl))
267 268 for f, e in pycompat.iteritems(dmap):
268 269 if e[0] == b'n' and e[3] == now:
269 270 # The file was last modified "simultaneously" with the current
270 271 # write to dirstate (i.e. within the same second for file-
271 272 # systems with a granularity of 1 sec). This commonly happens
272 273 # for at least a couple of files on 'update'.
273 274 # The user could change the file without changing its size
274 275 # within the same second. Invalidate the file's mtime in
275 276 # dirstate, forcing future 'status' calls to compare the
276 277 # contents of the file if the size is the same. This prevents
277 278 # mistakenly treating such files as clean.
278 279 e = dirstatetuple(e[0], e[1], e[2], -1)
279 280 dmap[f] = e
280 281
281 282 if f in copymap:
282 283 f = b"%s\0%s" % (f, copymap[f])
283 284 e = _pack(b">cllll", e[0], e[1], e[2], e[3], len(f))
284 285 write(e)
285 286 write(f)
286 287 return cs.getvalue()
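
The commit message cites pre-computation as the point of the change. If one wanted to observe the allocation difference in isolation, a rough, self-contained micro-benchmark along these lines would do; it is not part of the commit and the timings are machine-dependent:

import timeit

NULL_ID = b"\x00" * 20
NULL_ITEM = (0, 0, 0, -1, -1, -1, -1, NULL_ID)

# building a fresh 8-tuple per call (old behaviour) vs. returning a constant (new behaviour)
fresh = timeit.timeit(lambda: (0, 0, 0, -1, -1, -1, -1, NULL_ID), number=1000000)
cached = timeit.timeit(lambda: NULL_ITEM, number=1000000)
print("fresh tuple: %.3fs  cached constant: %.3fs" % (fresh, cached))
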