sidedatacopies: get and store sidedata in the changelogrevision object...
author: marmoute
changeset: r43413:037a8759 (branch: default)
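In short: changelogrevision now receives the revision's sidedata along with its text (__new__(cls, text, sidedata) plus a new _sidedata slot), and read()/changelogrevision() obtain both through _revisiondata() instead of revision(). A minimal sketch of the resulting call pattern, assuming the Mercurial source is importable and the script runs inside a repository with at least one commit; the repo/ui wiring below is illustrative, not part of this patch:

from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')   # assumption: run from inside a repo
cl = repo.changelog

c = cl.changelogrevision(cl.tiprev())   # internally: text, sidedata = cl._revisiondata(rev)
print(c.user, c.date, c.files)          # parsed fields, unchanged by this patch
print(c._sidedata)                      # raw sidedata mapping now kept on the object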
@@ -1,743 +1,746 @@
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 pycompat,
21 pycompat,
22 revlog,
22 revlog,
23 util,
23 util,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
32 _defaultextra = {b'branch': b'default'}
32 _defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
55 def _string_unescape(text):
55 def _string_unescape(text):
56 if b'\\0' in text:
56 if b'\\0' in text:
57 # fix up \0 without getting into trouble with \\0
57 # fix up \0 without getting into trouble with \\0
58 text = text.replace(b'\\\\', b'\\\\\n')
58 text = text.replace(b'\\\\', b'\\\\\n')
59 text = text.replace(b'\\0', b'\0')
59 text = text.replace(b'\\0', b'\0')
60 text = text.replace(b'\n', b'')
60 text = text.replace(b'\n', b'')
61 return stringutil.unescapestr(text)
61 return stringutil.unescapestr(text)
62
62
63
63
64 def decodeextra(text):
64 def decodeextra(text):
65 """
65 """
66 >>> from .pycompat import bytechr as chr
66 >>> from .pycompat import bytechr as chr
67 >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
67 >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
68 ... ).items())
68 ... ).items())
69 [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
69 [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
70 >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
70 >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
71 ... b'baz': chr(92) + chr(0) + b'2'})
71 ... b'baz': chr(92) + chr(0) + b'2'})
72 ... ).items())
72 ... ).items())
73 [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
73 [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
74 """
74 """
75 extra = _defaultextra.copy()
75 extra = _defaultextra.copy()
76 for l in text.split(b'\0'):
76 for l in text.split(b'\0'):
77 if l:
77 if l:
78 k, v = _string_unescape(l).split(b':', 1)
78 k, v = _string_unescape(l).split(b':', 1)
79 extra[k] = v
79 extra[k] = v
80 return extra
80 return extra
81
81
82
82
83 def encodeextra(d):
83 def encodeextra(d):
84 # keys must be sorted to produce a deterministic changelog entry
84 # keys must be sorted to produce a deterministic changelog entry
85 items = [
85 items = [
86 _string_escape(b'%s:%s' % (k, pycompat.bytestr(d[k])))
86 _string_escape(b'%s:%s' % (k, pycompat.bytestr(d[k])))
87 for k in sorted(d)
87 for k in sorted(d)
88 ]
88 ]
89 return b"\0".join(items)
89 return b"\0".join(items)
90
90
91
91
92 def encodecopies(files, copies):
92 def encodecopies(files, copies):
93 items = []
93 items = []
94 for i, dst in enumerate(files):
94 for i, dst in enumerate(files):
95 if dst in copies:
95 if dst in copies:
96 items.append(b'%d\0%s' % (i, copies[dst]))
96 items.append(b'%d\0%s' % (i, copies[dst]))
97 if len(items) != len(copies):
97 if len(items) != len(copies):
98 raise error.ProgrammingError(
98 raise error.ProgrammingError(
99 b'some copy targets missing from file list'
99 b'some copy targets missing from file list'
100 )
100 )
101 return b"\n".join(items)
101 return b"\n".join(items)
102
102
103
103
104 def decodecopies(files, data):
104 def decodecopies(files, data):
105 try:
105 try:
106 copies = {}
106 copies = {}
107 if not data:
107 if not data:
108 return copies
108 return copies
109 for l in data.split(b'\n'):
109 for l in data.split(b'\n'):
110 strindex, src = l.split(b'\0')
110 strindex, src = l.split(b'\0')
111 i = int(strindex)
111 i = int(strindex)
112 dst = files[i]
112 dst = files[i]
113 copies[dst] = src
113 copies[dst] = src
114 return copies
114 return copies
115 except (ValueError, IndexError):
115 except (ValueError, IndexError):
116 # Perhaps someone had chosen the same key name (e.g. "p1copies") and
116 # Perhaps someone had chosen the same key name (e.g. "p1copies") and
117 # used different syntax for the value.
117 # used different syntax for the value.
118 return None
118 return None
119
119
120
120
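For reference, the two copy helpers above define the extra-based encoding: each entry is '<index into files>\0<copy source>', and entries are joined by newlines. A small round trip against this revision of the module (assumes the Mercurial source tree is importable; the file names are made up):

from mercurial.changelog import encodecopies, decodecopies

files = [b'a.txt', b'b.txt', b'c.txt']        # sorted file list of the changeset
copies = {b'b.txt': b'a.txt'}                 # b.txt was copied from a.txt

data = encodecopies(files, copies)
assert data == b'1\x00a.txt'                  # index 1 points at b.txt
assert decodecopies(files, data) == copies    # lossless round trip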
121 def encodefileindices(files, subset):
121 def encodefileindices(files, subset):
122 subset = set(subset)
122 subset = set(subset)
123 indices = []
123 indices = []
124 for i, f in enumerate(files):
124 for i, f in enumerate(files):
125 if f in subset:
125 if f in subset:
126 indices.append(b'%d' % i)
126 indices.append(b'%d' % i)
127 return b'\n'.join(indices)
127 return b'\n'.join(indices)
128
128
129
129
130 def decodefileindices(files, data):
130 def decodefileindices(files, data):
131 try:
131 try:
132 subset = []
132 subset = []
133 if not data:
133 if not data:
134 return subset
134 return subset
135 for strindex in data.split(b'\n'):
135 for strindex in data.split(b'\n'):
136 i = int(strindex)
136 i = int(strindex)
137 if i < 0 or i >= len(files):
137 if i < 0 or i >= len(files):
138 return None
138 return None
139 subset.append(files[i])
139 subset.append(files[i])
140 return subset
140 return subset
141 except (ValueError, IndexError):
141 except (ValueError, IndexError):
142 # Perhaps someone had chosen the same key name (e.g. "added") and
142 # Perhaps someone had chosen the same key name (e.g. "added") and
143 # used different syntax for the value.
143 # used different syntax for the value.
144 return None
144 return None
145
145
146
146
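Similarly, added/removed files are stored as newline-joined indices into the sorted file list. A round trip under the same assumptions as the previous sketch:

from mercurial.changelog import encodefileindices, decodefileindices

files = [b'a.txt', b'b.txt', b'c.txt']
added = [b'a.txt', b'c.txt']

data = encodefileindices(files, added)
assert data == b'0\n2'                        # indices of a.txt and c.txt
assert decodefileindices(files, data) == added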
147 def stripdesc(desc):
147 def stripdesc(desc):
148 """strip trailing whitespace and leading and trailing empty lines"""
148 """strip trailing whitespace and leading and trailing empty lines"""
149 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
149 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
150
150
151
151
152 class appender(object):
152 class appender(object):
153 '''the changelog index must be updated last on disk, so we use this class
153 '''the changelog index must be updated last on disk, so we use this class
154 to delay writes to it'''
154 to delay writes to it'''
155
155
156 def __init__(self, vfs, name, mode, buf):
156 def __init__(self, vfs, name, mode, buf):
157 self.data = buf
157 self.data = buf
158 fp = vfs(name, mode)
158 fp = vfs(name, mode)
159 self.fp = fp
159 self.fp = fp
160 self.offset = fp.tell()
160 self.offset = fp.tell()
161 self.size = vfs.fstat(fp).st_size
161 self.size = vfs.fstat(fp).st_size
162 self._end = self.size
162 self._end = self.size
163
163
164 def end(self):
164 def end(self):
165 return self._end
165 return self._end
166
166
167 def tell(self):
167 def tell(self):
168 return self.offset
168 return self.offset
169
169
170 def flush(self):
170 def flush(self):
171 pass
171 pass
172
172
173 @property
173 @property
174 def closed(self):
174 def closed(self):
175 return self.fp.closed
175 return self.fp.closed
176
176
177 def close(self):
177 def close(self):
178 self.fp.close()
178 self.fp.close()
179
179
180 def seek(self, offset, whence=0):
180 def seek(self, offset, whence=0):
181 '''virtual file offset spans real file and data'''
181 '''virtual file offset spans real file and data'''
182 if whence == 0:
182 if whence == 0:
183 self.offset = offset
183 self.offset = offset
184 elif whence == 1:
184 elif whence == 1:
185 self.offset += offset
185 self.offset += offset
186 elif whence == 2:
186 elif whence == 2:
187 self.offset = self.end() + offset
187 self.offset = self.end() + offset
188 if self.offset < self.size:
188 if self.offset < self.size:
189 self.fp.seek(self.offset)
189 self.fp.seek(self.offset)
190
190
191 def read(self, count=-1):
191 def read(self, count=-1):
192 '''only trick here is reads that span real file and data'''
192 '''only trick here is reads that span real file and data'''
193 ret = b""
193 ret = b""
194 if self.offset < self.size:
194 if self.offset < self.size:
195 s = self.fp.read(count)
195 s = self.fp.read(count)
196 ret = s
196 ret = s
197 self.offset += len(s)
197 self.offset += len(s)
198 if count > 0:
198 if count > 0:
199 count -= len(s)
199 count -= len(s)
200 if count != 0:
200 if count != 0:
201 doff = self.offset - self.size
201 doff = self.offset - self.size
202 self.data.insert(0, b"".join(self.data))
202 self.data.insert(0, b"".join(self.data))
203 del self.data[1:]
203 del self.data[1:]
204 s = self.data[0][doff : doff + count]
204 s = self.data[0][doff : doff + count]
205 self.offset += len(s)
205 self.offset += len(s)
206 ret += s
206 ret += s
207 return ret
207 return ret
208
208
209 def write(self, s):
209 def write(self, s):
210 self.data.append(bytes(s))
210 self.data.append(bytes(s))
211 self.offset += len(s)
211 self.offset += len(s)
212 self._end += len(s)
212 self._end += len(s)
213
213
214 def __enter__(self):
214 def __enter__(self):
215 self.fp.__enter__()
215 self.fp.__enter__()
216 return self
216 return self
217
217
218 def __exit__(self, *args):
218 def __exit__(self, *args):
219 return self.fp.__exit__(*args)
219 return self.fp.__exit__(*args)
220
220
221
221
222 def _divertopener(opener, target):
222 def _divertopener(opener, target):
223 """build an opener that writes in 'target.a' instead of 'target'"""
223 """build an opener that writes in 'target.a' instead of 'target'"""
224
224
225 def _divert(name, mode=b'r', checkambig=False):
225 def _divert(name, mode=b'r', checkambig=False):
226 if name != target:
226 if name != target:
227 return opener(name, mode)
227 return opener(name, mode)
228 return opener(name + b".a", mode)
228 return opener(name + b".a", mode)
229
229
230 return _divert
230 return _divert
231
231
232
232
233 def _delayopener(opener, target, buf):
233 def _delayopener(opener, target, buf):
234 """build an opener that stores chunks in 'buf' instead of 'target'"""
234 """build an opener that stores chunks in 'buf' instead of 'target'"""
235
235
236 def _delay(name, mode=b'r', checkambig=False):
236 def _delay(name, mode=b'r', checkambig=False):
237 if name != target:
237 if name != target:
238 return opener(name, mode)
238 return opener(name, mode)
239 return appender(opener, name, mode, buf)
239 return appender(opener, name, mode, buf)
240
240
241 return _delay
241 return _delay
242
242
243
243
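The appender/_divertopener/_delayopener trio exists so the changelog index is written last: while a transaction is open, writes to 00changelog.i are either buffered in memory or diverted to 00changelog.i.a, and only land in the real index at finalize time. A stripped-down, self-contained sketch of that idea (this is not Mercurial's code, just the shape of it):

import os, tempfile

class delaybuffer(object):
    """Collect writes in memory instead of touching the real file."""
    def __init__(self, buf):
        self.buf = buf
    def write(self, data):
        self.buf.append(data)

def delayed_opener(realpath, buf, delaying):
    # While 'delaying' is true, hand out the buffering writer; otherwise
    # append to the real file, mirroring what _delayopener/_finalize do.
    if delaying:
        return delaybuffer(buf)
    return open(realpath, 'ab')

tmpdir = tempfile.mkdtemp()
index = os.path.join(tmpdir, '00changelog.i')
open(index, 'wb').close()

pending = []
delayed_opener(index, pending, delaying=True).write(b'new index entry')
assert os.path.getsize(index) == 0            # real index untouched so far

with delayed_opener(index, pending, delaying=False) as fp:   # "finalize"
    fp.write(b''.join(pending))
assert open(index, 'rb').read() == b'new index entry'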
244 @attr.s
244 @attr.s
245 class _changelogrevision(object):
245 class _changelogrevision(object):
246 # Extensions might modify _defaultextra, so let the constructor below pass
246 # Extensions might modify _defaultextra, so let the constructor below pass
247 # it in
247 # it in
248 extra = attr.ib()
248 extra = attr.ib()
249 manifest = attr.ib(default=nullid)
249 manifest = attr.ib(default=nullid)
250 user = attr.ib(default=b'')
250 user = attr.ib(default=b'')
251 date = attr.ib(default=(0, 0))
251 date = attr.ib(default=(0, 0))
252 files = attr.ib(default=attr.Factory(list))
252 files = attr.ib(default=attr.Factory(list))
253 filesadded = attr.ib(default=None)
253 filesadded = attr.ib(default=None)
254 filesremoved = attr.ib(default=None)
254 filesremoved = attr.ib(default=None)
255 p1copies = attr.ib(default=None)
255 p1copies = attr.ib(default=None)
256 p2copies = attr.ib(default=None)
256 p2copies = attr.ib(default=None)
257 description = attr.ib(default=b'')
257 description = attr.ib(default=b'')
258
258
259
259
260 class changelogrevision(object):
260 class changelogrevision(object):
261 """Holds results of a parsed changelog revision.
261 """Holds results of a parsed changelog revision.
262
262
263 Changelog revisions consist of multiple pieces of data, including
263 Changelog revisions consist of multiple pieces of data, including
264 the manifest node, user, and date. This object exposes a view into
264 the manifest node, user, and date. This object exposes a view into
265 the parsed object.
265 the parsed object.
266 """
266 """
267
267
268 __slots__ = (
268 __slots__ = (
269 r'_offsets',
269 r'_offsets',
270 r'_text',
270 r'_text',
271 r'_sidedata',
271 )
272 )
272
273
273 def __new__(cls, text):
274 def __new__(cls, text, sidedata):
274 if not text:
275 if not text:
275 return _changelogrevision(extra=_defaultextra)
276 return _changelogrevision(extra=_defaultextra)
276
277
277 self = super(changelogrevision, cls).__new__(cls)
278 self = super(changelogrevision, cls).__new__(cls)
278 # We could return here and implement the following as an __init__.
279 # We could return here and implement the following as an __init__.
279 # But doing it here is equivalent and saves an extra function call.
280 # But doing it here is equivalent and saves an extra function call.
280
281
281 # format used:
282 # format used:
282 # nodeid\n : manifest node in ascii
283 # nodeid\n : manifest node in ascii
283 # user\n : user, no \n or \r allowed
284 # user\n : user, no \n or \r allowed
284 # time tz extra\n : date (time is int or float, timezone is int)
285 # time tz extra\n : date (time is int or float, timezone is int)
285 # : extra is metadata, encoded and separated by '\0'
286 # : extra is metadata, encoded and separated by '\0'
286 # : older versions ignore it
287 # : older versions ignore it
287 # files\n\n : files modified by the cset, no \n or \r allowed
288 # files\n\n : files modified by the cset, no \n or \r allowed
288 # (.*) : comment (free text, ideally utf-8)
289 # (.*) : comment (free text, ideally utf-8)
289 #
290 #
290 # changelog v0 doesn't use extra
291 # changelog v0 doesn't use extra
291
292
292 nl1 = text.index(b'\n')
293 nl1 = text.index(b'\n')
293 nl2 = text.index(b'\n', nl1 + 1)
294 nl2 = text.index(b'\n', nl1 + 1)
294 nl3 = text.index(b'\n', nl2 + 1)
295 nl3 = text.index(b'\n', nl2 + 1)
295
296
296 # The list of files may be empty. Which means nl3 is the first of the
297 # The list of files may be empty. Which means nl3 is the first of the
297 # double newline that precedes the description.
298 # double newline that precedes the description.
298 if text[nl3 + 1 : nl3 + 2] == b'\n':
299 if text[nl3 + 1 : nl3 + 2] == b'\n':
299 doublenl = nl3
300 doublenl = nl3
300 else:
301 else:
301 doublenl = text.index(b'\n\n', nl3 + 1)
302 doublenl = text.index(b'\n\n', nl3 + 1)
302
303
303 self._offsets = (nl1, nl2, nl3, doublenl)
304 self._offsets = (nl1, nl2, nl3, doublenl)
304 self._text = text
305 self._text = text
306 self._sidedata = sidedata
305
307
306 return self
308 return self
307
309
308 @property
310 @property
309 def manifest(self):
311 def manifest(self):
310 return bin(self._text[0 : self._offsets[0]])
312 return bin(self._text[0 : self._offsets[0]])
311
313
312 @property
314 @property
313 def user(self):
315 def user(self):
314 off = self._offsets
316 off = self._offsets
315 return encoding.tolocal(self._text[off[0] + 1 : off[1]])
317 return encoding.tolocal(self._text[off[0] + 1 : off[1]])
316
318
317 @property
319 @property
318 def _rawdate(self):
320 def _rawdate(self):
319 off = self._offsets
321 off = self._offsets
320 dateextra = self._text[off[1] + 1 : off[2]]
322 dateextra = self._text[off[1] + 1 : off[2]]
321 return dateextra.split(b' ', 2)[0:2]
323 return dateextra.split(b' ', 2)[0:2]
322
324
323 @property
325 @property
324 def _rawextra(self):
326 def _rawextra(self):
325 off = self._offsets
327 off = self._offsets
326 dateextra = self._text[off[1] + 1 : off[2]]
328 dateextra = self._text[off[1] + 1 : off[2]]
327 fields = dateextra.split(b' ', 2)
329 fields = dateextra.split(b' ', 2)
328 if len(fields) != 3:
330 if len(fields) != 3:
329 return None
331 return None
330
332
331 return fields[2]
333 return fields[2]
332
334
333 @property
335 @property
334 def date(self):
336 def date(self):
335 raw = self._rawdate
337 raw = self._rawdate
336 time = float(raw[0])
338 time = float(raw[0])
337 # Various tools did silly things with the timezone.
339 # Various tools did silly things with the timezone.
338 try:
340 try:
339 timezone = int(raw[1])
341 timezone = int(raw[1])
340 except ValueError:
342 except ValueError:
341 timezone = 0
343 timezone = 0
342
344
343 return time, timezone
345 return time, timezone
344
346
345 @property
347 @property
346 def extra(self):
348 def extra(self):
347 raw = self._rawextra
349 raw = self._rawextra
348 if raw is None:
350 if raw is None:
349 return _defaultextra
351 return _defaultextra
350
352
351 return decodeextra(raw)
353 return decodeextra(raw)
352
354
353 @property
355 @property
354 def files(self):
356 def files(self):
355 off = self._offsets
357 off = self._offsets
356 if off[2] == off[3]:
358 if off[2] == off[3]:
357 return []
359 return []
358
360
359 return self._text[off[2] + 1 : off[3]].split(b'\n')
361 return self._text[off[2] + 1 : off[3]].split(b'\n')
360
362
361 @property
363 @property
362 def filesadded(self):
364 def filesadded(self):
363 rawindices = self.extra.get(b'filesadded')
365 rawindices = self.extra.get(b'filesadded')
364 return rawindices and decodefileindices(self.files, rawindices)
366 return rawindices and decodefileindices(self.files, rawindices)
365
367
366 @property
368 @property
367 def filesremoved(self):
369 def filesremoved(self):
368 rawindices = self.extra.get(b'filesremoved')
370 rawindices = self.extra.get(b'filesremoved')
369 return rawindices and decodefileindices(self.files, rawindices)
371 return rawindices and decodefileindices(self.files, rawindices)
370
372
371 @property
373 @property
372 def p1copies(self):
374 def p1copies(self):
373 rawcopies = self.extra.get(b'p1copies')
375 rawcopies = self.extra.get(b'p1copies')
374 return rawcopies and decodecopies(self.files, rawcopies)
376 return rawcopies and decodecopies(self.files, rawcopies)
375
377
376 @property
378 @property
377 def p2copies(self):
379 def p2copies(self):
378 rawcopies = self.extra.get(b'p2copies')
380 rawcopies = self.extra.get(b'p2copies')
379 return rawcopies and decodecopies(self.files, rawcopies)
381 return rawcopies and decodecopies(self.files, rawcopies)
380
382
381 @property
383 @property
382 def description(self):
384 def description(self):
383 return encoding.tolocal(self._text[self._offsets[3] + 2 :])
385 return encoding.tolocal(self._text[self._offsets[3] + 2 :])
384
386
385
387
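The comment block in __new__ above is the entire on-disk layout of a changelog entry. A self-contained example in that layout, parsed with the same offset logic the class uses (the node, user and description are made up):

text = (
    b'50186ad15cbb2a6b0b7dc0e37e6c4b5b2a4c2a10\n'    # manifest node in hex
    b'Alice Example <alice@example.com>\n'            # user
    b'1570000000 0 branch:stable\0close:1\n'          # time tz extra (\0-separated)
    b'a.txt\n'                                        # files touched by the cset,
    b'b.txt\n'                                        # one per line
    b'\n'
    b'widen the frobnicator'                          # free-text description
)

nl1 = text.index(b'\n')                  # same offsets as changelogrevision.__new__
nl2 = text.index(b'\n', nl1 + 1)
nl3 = text.index(b'\n', nl2 + 1)
doublenl = text.index(b'\n\n', nl3 + 1)

assert text[nl2 + 1:nl3] == b'1570000000 0 branch:stable\x00close:1'
assert text[nl3 + 1:doublenl].split(b'\n') == [b'a.txt', b'b.txt']
assert text[doublenl + 2:] == b'widen the frobnicator'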
386 class changelog(revlog.revlog):
388 class changelog(revlog.revlog):
387 def __init__(self, opener, trypending=False):
389 def __init__(self, opener, trypending=False):
388 """Load a changelog revlog using an opener.
390 """Load a changelog revlog using an opener.
389
391
390 If ``trypending`` is true, we attempt to load the index from a
392 If ``trypending`` is true, we attempt to load the index from a
391 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
393 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
392 The ``00changelog.i.a`` file contains index (and possibly inline
394 The ``00changelog.i.a`` file contains index (and possibly inline
393 revision) data for a transaction that hasn't been finalized yet.
395 revision) data for a transaction that hasn't been finalized yet.
394 It exists in a separate file to facilitate readers (such as
396 It exists in a separate file to facilitate readers (such as
395 hooks processes) accessing data before a transaction is finalized.
397 hooks processes) accessing data before a transaction is finalized.
396 """
398 """
397 if trypending and opener.exists(b'00changelog.i.a'):
399 if trypending and opener.exists(b'00changelog.i.a'):
398 indexfile = b'00changelog.i.a'
400 indexfile = b'00changelog.i.a'
399 else:
401 else:
400 indexfile = b'00changelog.i'
402 indexfile = b'00changelog.i'
401
403
402 datafile = b'00changelog.d'
404 datafile = b'00changelog.d'
403 revlog.revlog.__init__(
405 revlog.revlog.__init__(
404 self,
406 self,
405 opener,
407 opener,
406 indexfile,
408 indexfile,
407 datafile=datafile,
409 datafile=datafile,
408 checkambig=True,
410 checkambig=True,
409 mmaplargeindex=True,
411 mmaplargeindex=True,
410 )
412 )
411
413
412 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
414 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
413 # changelogs don't benefit from generaldelta.
415 # changelogs don't benefit from generaldelta.
414
416
415 self.version &= ~revlog.FLAG_GENERALDELTA
417 self.version &= ~revlog.FLAG_GENERALDELTA
416 self._generaldelta = False
418 self._generaldelta = False
417
419
418 # Delta chains for changelogs tend to be very small because entries
420 # Delta chains for changelogs tend to be very small because entries
419 # tend to be small and don't delta well with each other. So disable delta
421 # tend to be small and don't delta well with each other. So disable delta
420 # chains.
422 # chains.
421 self._storedeltachains = False
423 self._storedeltachains = False
422
424
423 self._realopener = opener
425 self._realopener = opener
424 self._delayed = False
426 self._delayed = False
425 self._delaybuf = None
427 self._delaybuf = None
426 self._divert = False
428 self._divert = False
427 self.filteredrevs = frozenset()
429 self.filteredrevs = frozenset()
428 self._copiesstorage = opener.options.get(b'copies-storage')
430 self._copiesstorage = opener.options.get(b'copies-storage')
429
431
430 def tiprev(self):
432 def tiprev(self):
431 for i in pycompat.xrange(len(self) - 1, -2, -1):
433 for i in pycompat.xrange(len(self) - 1, -2, -1):
432 if i not in self.filteredrevs:
434 if i not in self.filteredrevs:
433 return i
435 return i
434
436
435 def tip(self):
437 def tip(self):
436 """filtered version of revlog.tip"""
438 """filtered version of revlog.tip"""
437 return self.node(self.tiprev())
439 return self.node(self.tiprev())
438
440
439 def __contains__(self, rev):
441 def __contains__(self, rev):
440 """filtered version of revlog.__contains__"""
442 """filtered version of revlog.__contains__"""
441 return 0 <= rev < len(self) and rev not in self.filteredrevs
443 return 0 <= rev < len(self) and rev not in self.filteredrevs
442
444
443 def __iter__(self):
445 def __iter__(self):
444 """filtered version of revlog.__iter__"""
446 """filtered version of revlog.__iter__"""
445 if len(self.filteredrevs) == 0:
447 if len(self.filteredrevs) == 0:
446 return revlog.revlog.__iter__(self)
448 return revlog.revlog.__iter__(self)
447
449
448 def filterediter():
450 def filterediter():
449 for i in pycompat.xrange(len(self)):
451 for i in pycompat.xrange(len(self)):
450 if i not in self.filteredrevs:
452 if i not in self.filteredrevs:
451 yield i
453 yield i
452
454
453 return filterediter()
455 return filterediter()
454
456
455 def revs(self, start=0, stop=None):
457 def revs(self, start=0, stop=None):
456 """filtered version of revlog.revs"""
458 """filtered version of revlog.revs"""
457 for i in super(changelog, self).revs(start, stop):
459 for i in super(changelog, self).revs(start, stop):
458 if i not in self.filteredrevs:
460 if i not in self.filteredrevs:
459 yield i
461 yield i
460
462
461 def _checknofilteredinrevs(self, revs):
463 def _checknofilteredinrevs(self, revs):
462 """raise the appropriate error if 'revs' contains a filtered revision
464 """raise the appropriate error if 'revs' contains a filtered revision
463
465
464 This returns a version of 'revs' to be used thereafter by the caller.
466 This returns a version of 'revs' to be used thereafter by the caller.
465 In particular, if revs is an iterator, it is converted into a set.
467 In particular, if revs is an iterator, it is converted into a set.
466 """
468 """
467 safehasattr = util.safehasattr
469 safehasattr = util.safehasattr
468 if safehasattr(revs, '__next__'):
470 if safehasattr(revs, '__next__'):
469 # Note that inspect.isgenerator() is not true for iterators,
471 # Note that inspect.isgenerator() is not true for iterators,
470 revs = set(revs)
472 revs = set(revs)
471
473
472 filteredrevs = self.filteredrevs
474 filteredrevs = self.filteredrevs
473 if safehasattr(revs, 'first'): # smartset
475 if safehasattr(revs, 'first'): # smartset
474 offenders = revs & filteredrevs
476 offenders = revs & filteredrevs
475 else:
477 else:
476 offenders = filteredrevs.intersection(revs)
478 offenders = filteredrevs.intersection(revs)
477
479
478 for rev in offenders:
480 for rev in offenders:
479 raise error.FilteredIndexError(rev)
481 raise error.FilteredIndexError(rev)
480 return revs
482 return revs
481
483
482 def headrevs(self, revs=None):
484 def headrevs(self, revs=None):
483 if revs is None and self.filteredrevs:
485 if revs is None and self.filteredrevs:
484 try:
486 try:
485 return self.index.headrevsfiltered(self.filteredrevs)
487 return self.index.headrevsfiltered(self.filteredrevs)
486 # AttributeError covers non-c-extension environments and
488 # AttributeError covers non-c-extension environments and
487 # old c extensions without filter handling.
489 # old c extensions without filter handling.
488 except AttributeError:
490 except AttributeError:
489 return self._headrevs()
491 return self._headrevs()
490
492
491 if self.filteredrevs:
493 if self.filteredrevs:
492 revs = self._checknofilteredinrevs(revs)
494 revs = self._checknofilteredinrevs(revs)
493 return super(changelog, self).headrevs(revs)
495 return super(changelog, self).headrevs(revs)
494
496
495 def strip(self, *args, **kwargs):
497 def strip(self, *args, **kwargs):
496 # XXX make something better than assert
498 # XXX make something better than assert
497 # We can't expect proper strip behavior if we are filtered.
499 # We can't expect proper strip behavior if we are filtered.
498 assert not self.filteredrevs
500 assert not self.filteredrevs
499 super(changelog, self).strip(*args, **kwargs)
501 super(changelog, self).strip(*args, **kwargs)
500
502
501 def rev(self, node):
503 def rev(self, node):
502 """filtered version of revlog.rev"""
504 """filtered version of revlog.rev"""
503 r = super(changelog, self).rev(node)
505 r = super(changelog, self).rev(node)
504 if r in self.filteredrevs:
506 if r in self.filteredrevs:
505 raise error.FilteredLookupError(
507 raise error.FilteredLookupError(
506 hex(node), self.indexfile, _(b'filtered node')
508 hex(node), self.indexfile, _(b'filtered node')
507 )
509 )
508 return r
510 return r
509
511
510 def node(self, rev):
512 def node(self, rev):
511 """filtered version of revlog.node"""
513 """filtered version of revlog.node"""
512 if rev in self.filteredrevs:
514 if rev in self.filteredrevs:
513 raise error.FilteredIndexError(rev)
515 raise error.FilteredIndexError(rev)
514 return super(changelog, self).node(rev)
516 return super(changelog, self).node(rev)
515
517
516 def linkrev(self, rev):
518 def linkrev(self, rev):
517 """filtered version of revlog.linkrev"""
519 """filtered version of revlog.linkrev"""
518 if rev in self.filteredrevs:
520 if rev in self.filteredrevs:
519 raise error.FilteredIndexError(rev)
521 raise error.FilteredIndexError(rev)
520 return super(changelog, self).linkrev(rev)
522 return super(changelog, self).linkrev(rev)
521
523
522 def parentrevs(self, rev):
524 def parentrevs(self, rev):
523 """filtered version of revlog.parentrevs"""
525 """filtered version of revlog.parentrevs"""
524 if rev in self.filteredrevs:
526 if rev in self.filteredrevs:
525 raise error.FilteredIndexError(rev)
527 raise error.FilteredIndexError(rev)
526 return super(changelog, self).parentrevs(rev)
528 return super(changelog, self).parentrevs(rev)
527
529
528 def flags(self, rev):
530 def flags(self, rev):
529 """filtered version of revlog.flags"""
531 """filtered version of revlog.flags"""
530 if rev in self.filteredrevs:
532 if rev in self.filteredrevs:
531 raise error.FilteredIndexError(rev)
533 raise error.FilteredIndexError(rev)
532 return super(changelog, self).flags(rev)
534 return super(changelog, self).flags(rev)
533
535
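All of the wrappers above implement one contract: a revision listed in filteredrevs behaves as if it did not exist, and touching it raises a Filtered* error. Normally repoview computes that set; forcing it by hand, as below, is only to illustrate the behaviour (assumes Mercurial importable and a repository with at least one commit):

from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')    # assumption: run from inside a repo
cl = repo.changelog
cl.filteredrevs = frozenset({0})     # normally set by repoview, not by hand
assert 0 not in cl                   # __contains__ honours the filter
assert 0 not in list(cl.revs())      # so do revs(), __iter__ and headrevs()
try:
    cl.node(0)
except Exception as exc:             # error.FilteredIndexError
    print(type(exc).__name__)
cl.filteredrevs = frozenset()        # restore the unfiltered view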
534 def delayupdate(self, tr):
536 def delayupdate(self, tr):
535 b"delay visibility of index updates to other readers"
537 b"delay visibility of index updates to other readers"
536
538
537 if not self._delayed:
539 if not self._delayed:
538 if len(self) == 0:
540 if len(self) == 0:
539 self._divert = True
541 self._divert = True
540 if self._realopener.exists(self.indexfile + b'.a'):
542 if self._realopener.exists(self.indexfile + b'.a'):
541 self._realopener.unlink(self.indexfile + b'.a')
543 self._realopener.unlink(self.indexfile + b'.a')
542 self.opener = _divertopener(self._realopener, self.indexfile)
544 self.opener = _divertopener(self._realopener, self.indexfile)
543 else:
545 else:
544 self._delaybuf = []
546 self._delaybuf = []
545 self.opener = _delayopener(
547 self.opener = _delayopener(
546 self._realopener, self.indexfile, self._delaybuf
548 self._realopener, self.indexfile, self._delaybuf
547 )
549 )
548 self._delayed = True
550 self._delayed = True
549 tr.addpending(b'cl-%i' % id(self), self._writepending)
551 tr.addpending(b'cl-%i' % id(self), self._writepending)
550 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
552 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
551
553
552 def _finalize(self, tr):
554 def _finalize(self, tr):
553 b"finalize index updates"
555 b"finalize index updates"
554 self._delayed = False
556 self._delayed = False
555 self.opener = self._realopener
557 self.opener = self._realopener
556 # move redirected index data back into place
558 # move redirected index data back into place
557 if self._divert:
559 if self._divert:
558 assert not self._delaybuf
560 assert not self._delaybuf
559 tmpname = self.indexfile + b".a"
561 tmpname = self.indexfile + b".a"
560 nfile = self.opener.open(tmpname)
562 nfile = self.opener.open(tmpname)
561 nfile.close()
563 nfile.close()
562 self.opener.rename(tmpname, self.indexfile, checkambig=True)
564 self.opener.rename(tmpname, self.indexfile, checkambig=True)
563 elif self._delaybuf:
565 elif self._delaybuf:
564 fp = self.opener(self.indexfile, b'a', checkambig=True)
566 fp = self.opener(self.indexfile, b'a', checkambig=True)
565 fp.write(b"".join(self._delaybuf))
567 fp.write(b"".join(self._delaybuf))
566 fp.close()
568 fp.close()
567 self._delaybuf = None
569 self._delaybuf = None
568 self._divert = False
570 self._divert = False
569 # split when we're done
571 # split when we're done
570 self._enforceinlinesize(tr)
572 self._enforceinlinesize(tr)
571
573
572 def _writepending(self, tr):
574 def _writepending(self, tr):
573 b"create a file containing the unfinalized state for pretxnchangegroup"
575 b"create a file containing the unfinalized state for pretxnchangegroup"
574 if self._delaybuf:
576 if self._delaybuf:
575 # make a temporary copy of the index
577 # make a temporary copy of the index
576 fp1 = self._realopener(self.indexfile)
578 fp1 = self._realopener(self.indexfile)
577 pendingfilename = self.indexfile + b".a"
579 pendingfilename = self.indexfile + b".a"
578 # register as a temp file to ensure cleanup on failure
580 # register as a temp file to ensure cleanup on failure
579 tr.registertmp(pendingfilename)
581 tr.registertmp(pendingfilename)
580 # write existing data
582 # write existing data
581 fp2 = self._realopener(pendingfilename, b"w")
583 fp2 = self._realopener(pendingfilename, b"w")
582 fp2.write(fp1.read())
584 fp2.write(fp1.read())
583 # add pending data
585 # add pending data
584 fp2.write(b"".join(self._delaybuf))
586 fp2.write(b"".join(self._delaybuf))
585 fp2.close()
587 fp2.close()
586 # switch modes so finalize can simply rename
588 # switch modes so finalize can simply rename
587 self._delaybuf = None
589 self._delaybuf = None
588 self._divert = True
590 self._divert = True
589 self.opener = _divertopener(self._realopener, self.indexfile)
591 self.opener = _divertopener(self._realopener, self.indexfile)
590
592
591 if self._divert:
593 if self._divert:
592 return True
594 return True
593
595
594 return False
596 return False
595
597
596 def _enforceinlinesize(self, tr, fp=None):
598 def _enforceinlinesize(self, tr, fp=None):
597 if not self._delayed:
599 if not self._delayed:
598 revlog.revlog._enforceinlinesize(self, tr, fp)
600 revlog.revlog._enforceinlinesize(self, tr, fp)
599
601
600 def read(self, node):
602 def read(self, node):
601 """Obtain data from a parsed changelog revision.
603 """Obtain data from a parsed changelog revision.
602
604
603 Returns a 6-tuple of:
605 Returns a 6-tuple of:
604
606
605 - manifest node in binary
607 - manifest node in binary
606 - author/user as a localstr
608 - author/user as a localstr
607 - date as a 2-tuple of (time, timezone)
609 - date as a 2-tuple of (time, timezone)
608 - list of files
610 - list of files
609 - commit message as a localstr
611 - commit message as a localstr
610 - dict of extra metadata
612 - dict of extra metadata
611
613
612 Unless you need to access all fields, consider calling
614 Unless you need to access all fields, consider calling
613 ``changelogrevision`` instead, as it is faster for partial object
615 ``changelogrevision`` instead, as it is faster for partial object
614 access.
616 access.
615 """
617 """
616 c = changelogrevision(self.revision(node))
618 c = changelogrevision(*self._revisiondata(node))
617 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
619 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
618
620
619 def changelogrevision(self, nodeorrev):
621 def changelogrevision(self, nodeorrev):
620 """Obtain a ``changelogrevision`` for a node or revision."""
622 """Obtain a ``changelogrevision`` for a node or revision."""
621 return changelogrevision(self.revision(nodeorrev))
623 text, sidedata = self._revisiondata(nodeorrev)
624 return changelogrevision(text, sidedata)
622
625
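Both entry points above now route through _revisiondata(), so the parsed object is built from the (text, sidedata) pair rather than from the text alone. The observable behaviour of the two callers stays equivalent; a quick check under the same assumptions as the first sketch:

from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')    # assumption: run from inside a repo
cl = repo.changelog
node = cl.tip()

manifest, user, date, files, desc, extra = cl.read(node)   # eager 6-tuple
c = cl.changelogrevision(node)                             # lazy accessor
assert (c.manifest, c.user, c.date, c.files, c.description, c.extra) == \
       (manifest, user, date, files, desc, extra)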
623 def readfiles(self, node):
626 def readfiles(self, node):
624 """
627 """
625 short version of read that only returns the files modified by the cset
628 short version of read that only returns the files modified by the cset
626 """
629 """
627 text = self.revision(node)
630 text = self.revision(node)
628 if not text:
631 if not text:
629 return []
632 return []
630 last = text.index(b"\n\n")
633 last = text.index(b"\n\n")
631 l = text[:last].split(b'\n')
634 l = text[:last].split(b'\n')
632 return l[3:]
635 return l[3:]
633
636
634 def add(
637 def add(
635 self,
638 self,
636 manifest,
639 manifest,
637 files,
640 files,
638 desc,
641 desc,
639 transaction,
642 transaction,
640 p1,
643 p1,
641 p2,
644 p2,
642 user,
645 user,
643 date=None,
646 date=None,
644 extra=None,
647 extra=None,
645 p1copies=None,
648 p1copies=None,
646 p2copies=None,
649 p2copies=None,
647 filesadded=None,
650 filesadded=None,
648 filesremoved=None,
651 filesremoved=None,
649 ):
652 ):
650 # Convert to UTF-8 encoded bytestrings as the very first
653 # Convert to UTF-8 encoded bytestrings as the very first
651 # thing: calling any method on a localstr object will turn it
654 # thing: calling any method on a localstr object will turn it
652 # into a str object and the cached UTF-8 string is thus lost.
655 # into a str object and the cached UTF-8 string is thus lost.
653 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
656 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
654
657
655 user = user.strip()
658 user = user.strip()
656 # An empty username or a username with a "\n" will make the
659 # An empty username or a username with a "\n" will make the
657 # revision text contain two "\n\n" sequences -> corrupt
660 # revision text contain two "\n\n" sequences -> corrupt
658 # repository since read cannot unpack the revision.
661 # repository since read cannot unpack the revision.
659 if not user:
662 if not user:
660 raise error.StorageError(_(b"empty username"))
663 raise error.StorageError(_(b"empty username"))
661 if b"\n" in user:
664 if b"\n" in user:
662 raise error.StorageError(
665 raise error.StorageError(
663 _(b"username %r contains a newline") % pycompat.bytestr(user)
666 _(b"username %r contains a newline") % pycompat.bytestr(user)
664 )
667 )
665
668
666 desc = stripdesc(desc)
669 desc = stripdesc(desc)
667
670
668 if date:
671 if date:
669 parseddate = b"%d %d" % dateutil.parsedate(date)
672 parseddate = b"%d %d" % dateutil.parsedate(date)
670 else:
673 else:
671 parseddate = b"%d %d" % dateutil.makedate()
674 parseddate = b"%d %d" % dateutil.makedate()
672 if extra:
675 if extra:
673 branch = extra.get(b"branch")
676 branch = extra.get(b"branch")
674 if branch in (b"default", b""):
677 if branch in (b"default", b""):
675 del extra[b"branch"]
678 del extra[b"branch"]
676 elif branch in (b".", b"null", b"tip"):
679 elif branch in (b".", b"null", b"tip"):
677 raise error.StorageError(
680 raise error.StorageError(
678 _(b'the name \'%s\' is reserved') % branch
681 _(b'the name \'%s\' is reserved') % branch
679 )
682 )
680 sortedfiles = sorted(files)
683 sortedfiles = sorted(files)
681 sidedata = None
684 sidedata = None
682 if extra is not None:
685 if extra is not None:
683 for name in (
686 for name in (
684 b'p1copies',
687 b'p1copies',
685 b'p2copies',
688 b'p2copies',
686 b'filesadded',
689 b'filesadded',
687 b'filesremoved',
690 b'filesremoved',
688 ):
691 ):
689 extra.pop(name, None)
692 extra.pop(name, None)
690 if p1copies is not None:
693 if p1copies is not None:
691 p1copies = encodecopies(sortedfiles, p1copies)
694 p1copies = encodecopies(sortedfiles, p1copies)
692 if p2copies is not None:
695 if p2copies is not None:
693 p2copies = encodecopies(sortedfiles, p2copies)
696 p2copies = encodecopies(sortedfiles, p2copies)
694 if filesadded is not None:
697 if filesadded is not None:
695 filesadded = encodefileindices(sortedfiles, filesadded)
698 filesadded = encodefileindices(sortedfiles, filesadded)
696 if filesremoved is not None:
699 if filesremoved is not None:
697 filesremoved = encodefileindices(sortedfiles, filesremoved)
700 filesremoved = encodefileindices(sortedfiles, filesremoved)
698 if self._copiesstorage == b'extra':
701 if self._copiesstorage == b'extra':
699 extrasentries = p1copies, p2copies, filesadded, filesremoved
702 extrasentries = p1copies, p2copies, filesadded, filesremoved
700 if extra is None and any(x is not None for x in extrasentries):
703 if extra is None and any(x is not None for x in extrasentries):
701 extra = {}
704 extra = {}
702 if p1copies is not None:
705 if p1copies is not None:
703 extra[b'p1copies'] = p1copies
706 extra[b'p1copies'] = p1copies
704 if p2copies is not None:
707 if p2copies is not None:
705 extra[b'p2copies'] = p2copies
708 extra[b'p2copies'] = p2copies
706 if filesadded is not None:
709 if filesadded is not None:
707 extra[b'filesadded'] = filesadded
710 extra[b'filesadded'] = filesadded
708 if filesremoved is not None:
711 if filesremoved is not None:
709 extra[b'filesremoved'] = filesremoved
712 extra[b'filesremoved'] = filesremoved
710 elif self._copiesstorage == b'changeset-sidedata':
713 elif self._copiesstorage == b'changeset-sidedata':
711 sidedata = {}
714 sidedata = {}
712 if p1copies is not None:
715 if p1copies is not None:
713 sidedata[sidedatamod.SD_P1COPIES] = p1copies
716 sidedata[sidedatamod.SD_P1COPIES] = p1copies
714 if p2copies is not None:
717 if p2copies is not None:
715 sidedata[sidedatamod.SD_P2COPIES] = p2copies
718 sidedata[sidedatamod.SD_P2COPIES] = p2copies
716 if filesadded is not None:
719 if filesadded is not None:
717 sidedata[sidedatamod.SD_FILESADDED] = filesadded
720 sidedata[sidedatamod.SD_FILESADDED] = filesadded
718 if filesremoved is not None:
721 if filesremoved is not None:
719 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
722 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
720
723
721 if extra:
724 if extra:
722 extra = encodeextra(extra)
725 extra = encodeextra(extra)
723 parseddate = b"%s %s" % (parseddate, extra)
726 parseddate = b"%s %s" % (parseddate, extra)
724 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
727 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
725 text = b"\n".join(l)
728 text = b"\n".join(l)
726 return self.addrevision(
729 return self.addrevision(
727 text, transaction, len(self), p1, p2, sidedata=sidedata
730 text, transaction, len(self), p1, p2, sidedata=sidedata
728 )
731 )
729
732
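When copies-storage is changeset-sidedata, add() now collects the four copy-tracing fields into a sidedata mapping (keyed by the SD_* constants from revlogutils.sidedata) and hands it to addrevision() instead of stuffing them into extra. Roughly the shape of that mapping, reusing the encoders shown earlier (the values are made up; assumes the Mercurial tree is importable):

from mercurial.revlogutils import sidedata as sidedatamod
from mercurial.changelog import encodecopies, encodefileindices

sortedfiles = [b'a.txt', b'b.txt']
sidedata = {
    sidedatamod.SD_P1COPIES: encodecopies(sortedfiles, {b'b.txt': b'a.txt'}),
    sidedatamod.SD_FILESADDED: encodefileindices(sortedfiles, [b'b.txt']),
}
print(sidedata)   # e.g. {SD_P1COPIES: b'1\x00a.txt', SD_FILESADDED: b'1'}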
730 def branchinfo(self, rev):
733 def branchinfo(self, rev):
731 """return the branch name and open/close state of a revision
734 """return the branch name and open/close state of a revision
732
735
733 This function exists because creating a changectx object
736 This function exists because creating a changectx object
734 just to access this is costly."""
737 just to access this is costly."""
735 extra = self.read(rev)[5]
738 extra = self.read(rev)[5]
736 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
739 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
737
740
738 def _nodeduplicatecallback(self, transaction, node):
741 def _nodeduplicatecallback(self, transaction, node):
741 # keep track of revisions that got "re-added", e.g. unbundle of known rev.
742 # keep track of revisions that got "re-added", e.g. unbundle of known rev.
740 #
743 #
741 # We track them in a list to preserve their order from the source bundle
744 # We track them in a list to preserve their order from the source bundle
742 duplicates = transaction.changes.setdefault(b'revduplicates', [])
745 duplicates = transaction.changes.setdefault(b'revduplicates', [])
743 duplicates.append(self.rev(node))
746 duplicates.append(self.rev(node))