Commit r43504:e51f5d06 on the default branch, by marmoute:
"sidedatacopies: only read from sidedata copies when the repository is in this mode"
Diff of mercurial/changelog.py (lines 1-712 before, 1-727 after).
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 util,
24 util,
25 )
25 )
26 from .utils import (
26 from .utils import (
27 dateutil,
27 dateutil,
28 stringutil,
28 stringutil,
29 )
29 )
30
30
31 from .revlogutils import sidedata as sidedatamod
31 from .revlogutils import sidedata as sidedatamod
32
32
33 _defaultextra = {b'branch': b'default'}
33 _defaultextra = {b'branch': b'default'}
34
34
35
35
36 def _string_escape(text):
36 def _string_escape(text):
37 """
37 """
38 >>> from .pycompat import bytechr as chr
38 >>> from .pycompat import bytechr as chr
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 >>> s
41 >>> s
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 >>> res = _string_escape(s)
43 >>> res = _string_escape(s)
44 >>> s == _string_unescape(res)
44 >>> s == _string_unescape(res)
45 True
45 True
46 """
46 """
47 # subset of the string_escape codec
47 # subset of the string_escape codec
48 text = (
48 text = (
49 text.replace(b'\\', b'\\\\')
49 text.replace(b'\\', b'\\\\')
50 .replace(b'\n', b'\\n')
50 .replace(b'\n', b'\\n')
51 .replace(b'\r', b'\\r')
51 .replace(b'\r', b'\\r')
52 )
52 )
53 return text.replace(b'\0', b'\\0')
53 return text.replace(b'\0', b'\\0')
54
54
55
55
def _string_unescape(text):
    """Reverse _string_escape(): decode escaped NUL, newline, CR, backslash."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    return stringutil.unescapestr(text)
63
63
64
64
def decodeextra(text):
    """Decode an encoded "extra" blob into a dict (branch defaults to 'default').

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    # entries are NUL separated, each one an escaped "key:value" pair
    for l in text.split(b'\0'):
        if l:
            k, v = _string_unescape(l).split(b':', 1)
            extra[k] = v
    return extra
82
82
83
83
def encodeextra(d):
    """Encode an "extra" dict into the NUL-separated changelog wire form."""
    # keys must be sorted to produce a deterministic changelog entry
    items = [
        _string_escape(b'%s:%s' % (k, pycompat.bytestr(d[k])))
        for k in sorted(d)
    ]
    return b"\0".join(items)
91
91
92
92
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
96
96
97
97
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # buf accumulates writes in memory until the transaction finalizes
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus all buffered writes
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes only go to the in-memory buffer; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # the remainder of the request comes from the in-memory buffer;
            # collapse the buffer into a single chunk so it can be sliced
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
166
166
167
167
168 def _divertopener(opener, target):
168 def _divertopener(opener, target):
169 """build an opener that writes in 'target.a' instead of 'target'"""
169 """build an opener that writes in 'target.a' instead of 'target'"""
170
170
171 def _divert(name, mode=b'r', checkambig=False):
171 def _divert(name, mode=b'r', checkambig=False):
172 if name != target:
172 if name != target:
173 return opener(name, mode)
173 return opener(name, mode)
174 return opener(name + b".a", mode)
174 return opener(name + b".a", mode)
175
175
176 return _divert
176 return _divert
177
177
178
178
179 def _delayopener(opener, target, buf):
179 def _delayopener(opener, target, buf):
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
181
181
182 def _delay(name, mode=b'r', checkambig=False):
182 def _delay(name, mode=b'r', checkambig=False):
183 if name != target:
183 if name != target:
184 return opener(name, mode)
184 return opener(name, mode)
185 return appender(opener, name, mode, buf)
185 return appender(opener, name, mode, buf)
186
186
187 return _delay
187 return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    # Plain attrs container returned for the empty/null revision and used
    # as the data shape mirrored by changelogrevision's properties.
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.

    ``cpsd`` ("copies in sidedata") selects where copy/file metadata is
    read from: the sidedata mappings when true, the ``extra`` dict
    otherwise.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
352
362
353
363
354 class changelog(revlog.revlog):
364 class changelog(revlog.revlog):
355 def __init__(self, opener, trypending=False):
365 def __init__(self, opener, trypending=False):
356 """Load a changelog revlog using an opener.
366 """Load a changelog revlog using an opener.
357
367
358 If ``trypending`` is true, we attempt to load the index from a
368 If ``trypending`` is true, we attempt to load the index from a
359 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
369 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
360 The ``00changelog.i.a`` file contains index (and possibly inline
370 The ``00changelog.i.a`` file contains index (and possibly inline
361 revision) data for a transaction that hasn't been finalized yet.
371 revision) data for a transaction that hasn't been finalized yet.
362 It exists in a separate file to facilitate readers (such as
372 It exists in a separate file to facilitate readers (such as
363 hooks processes) accessing data before a transaction is finalized.
373 hooks processes) accessing data before a transaction is finalized.
364 """
374 """
365 if trypending and opener.exists(b'00changelog.i.a'):
375 if trypending and opener.exists(b'00changelog.i.a'):
366 indexfile = b'00changelog.i.a'
376 indexfile = b'00changelog.i.a'
367 else:
377 else:
368 indexfile = b'00changelog.i'
378 indexfile = b'00changelog.i'
369
379
370 datafile = b'00changelog.d'
380 datafile = b'00changelog.d'
371 revlog.revlog.__init__(
381 revlog.revlog.__init__(
372 self,
382 self,
373 opener,
383 opener,
374 indexfile,
384 indexfile,
375 datafile=datafile,
385 datafile=datafile,
376 checkambig=True,
386 checkambig=True,
377 mmaplargeindex=True,
387 mmaplargeindex=True,
378 )
388 )
379
389
380 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
390 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
381 # changelogs don't benefit from generaldelta.
391 # changelogs don't benefit from generaldelta.
382
392
383 self.version &= ~revlog.FLAG_GENERALDELTA
393 self.version &= ~revlog.FLAG_GENERALDELTA
384 self._generaldelta = False
394 self._generaldelta = False
385
395
386 # Delta chains for changelogs tend to be very small because entries
396 # Delta chains for changelogs tend to be very small because entries
387 # tend to be small and don't delta well with each. So disable delta
397 # tend to be small and don't delta well with each. So disable delta
388 # chains.
398 # chains.
389 self._storedeltachains = False
399 self._storedeltachains = False
390
400
391 self._realopener = opener
401 self._realopener = opener
392 self._delayed = False
402 self._delayed = False
393 self._delaybuf = None
403 self._delaybuf = None
394 self._divert = False
404 self._divert = False
395 self.filteredrevs = frozenset()
405 self.filteredrevs = frozenset()
396 self._copiesstorage = opener.options.get(b'copies-storage')
406 self._copiesstorage = opener.options.get(b'copies-storage')
397
407
398 def tiprev(self):
408 def tiprev(self):
399 for i in pycompat.xrange(len(self) - 1, -2, -1):
409 for i in pycompat.xrange(len(self) - 1, -2, -1):
400 if i not in self.filteredrevs:
410 if i not in self.filteredrevs:
401 return i
411 return i
402
412
403 def tip(self):
413 def tip(self):
404 """filtered version of revlog.tip"""
414 """filtered version of revlog.tip"""
405 return self.node(self.tiprev())
415 return self.node(self.tiprev())
406
416
407 def __contains__(self, rev):
417 def __contains__(self, rev):
408 """filtered version of revlog.__contains__"""
418 """filtered version of revlog.__contains__"""
409 return 0 <= rev < len(self) and rev not in self.filteredrevs
419 return 0 <= rev < len(self) and rev not in self.filteredrevs
410
420
411 def __iter__(self):
421 def __iter__(self):
412 """filtered version of revlog.__iter__"""
422 """filtered version of revlog.__iter__"""
413 if len(self.filteredrevs) == 0:
423 if len(self.filteredrevs) == 0:
414 return revlog.revlog.__iter__(self)
424 return revlog.revlog.__iter__(self)
415
425
416 def filterediter():
426 def filterediter():
417 for i in pycompat.xrange(len(self)):
427 for i in pycompat.xrange(len(self)):
418 if i not in self.filteredrevs:
428 if i not in self.filteredrevs:
419 yield i
429 yield i
420
430
421 return filterediter()
431 return filterediter()
422
432
423 def revs(self, start=0, stop=None):
433 def revs(self, start=0, stop=None):
424 """filtered version of revlog.revs"""
434 """filtered version of revlog.revs"""
425 for i in super(changelog, self).revs(start, stop):
435 for i in super(changelog, self).revs(start, stop):
426 if i not in self.filteredrevs:
436 if i not in self.filteredrevs:
427 yield i
437 yield i
428
438
429 def _checknofilteredinrevs(self, revs):
439 def _checknofilteredinrevs(self, revs):
430 """raise the appropriate error if 'revs' contains a filtered revision
440 """raise the appropriate error if 'revs' contains a filtered revision
431
441
432 This returns a version of 'revs' to be used thereafter by the caller.
442 This returns a version of 'revs' to be used thereafter by the caller.
433 In particular, if revs is an iterator, it is converted into a set.
443 In particular, if revs is an iterator, it is converted into a set.
434 """
444 """
435 safehasattr = util.safehasattr
445 safehasattr = util.safehasattr
436 if safehasattr(revs, '__next__'):
446 if safehasattr(revs, '__next__'):
437 # Note that inspect.isgenerator() is not true for iterators,
447 # Note that inspect.isgenerator() is not true for iterators,
438 revs = set(revs)
448 revs = set(revs)
439
449
440 filteredrevs = self.filteredrevs
450 filteredrevs = self.filteredrevs
441 if safehasattr(revs, 'first'): # smartset
451 if safehasattr(revs, 'first'): # smartset
442 offenders = revs & filteredrevs
452 offenders = revs & filteredrevs
443 else:
453 else:
444 offenders = filteredrevs.intersection(revs)
454 offenders = filteredrevs.intersection(revs)
445
455
446 for rev in offenders:
456 for rev in offenders:
447 raise error.FilteredIndexError(rev)
457 raise error.FilteredIndexError(rev)
448 return revs
458 return revs
449
459
450 def headrevs(self, revs=None):
460 def headrevs(self, revs=None):
451 if revs is None and self.filteredrevs:
461 if revs is None and self.filteredrevs:
452 try:
462 try:
453 return self.index.headrevsfiltered(self.filteredrevs)
463 return self.index.headrevsfiltered(self.filteredrevs)
454 # AttributeError covers non-c-extension environments and
464 # AttributeError covers non-c-extension environments and
455 # old c extensions without filter handling.
465 # old c extensions without filter handling.
456 except AttributeError:
466 except AttributeError:
457 return self._headrevs()
467 return self._headrevs()
458
468
459 if self.filteredrevs:
469 if self.filteredrevs:
460 revs = self._checknofilteredinrevs(revs)
470 revs = self._checknofilteredinrevs(revs)
461 return super(changelog, self).headrevs(revs)
471 return super(changelog, self).headrevs(revs)
462
472
463 def strip(self, *args, **kwargs):
473 def strip(self, *args, **kwargs):
464 # XXX make something better than assert
474 # XXX make something better than assert
465 # We can't expect proper strip behavior if we are filtered.
475 # We can't expect proper strip behavior if we are filtered.
466 assert not self.filteredrevs
476 assert not self.filteredrevs
467 super(changelog, self).strip(*args, **kwargs)
477 super(changelog, self).strip(*args, **kwargs)
468
478
469 def rev(self, node):
479 def rev(self, node):
470 """filtered version of revlog.rev"""
480 """filtered version of revlog.rev"""
471 r = super(changelog, self).rev(node)
481 r = super(changelog, self).rev(node)
472 if r in self.filteredrevs:
482 if r in self.filteredrevs:
473 raise error.FilteredLookupError(
483 raise error.FilteredLookupError(
474 hex(node), self.indexfile, _(b'filtered node')
484 hex(node), self.indexfile, _(b'filtered node')
475 )
485 )
476 return r
486 return r
477
487
478 def node(self, rev):
488 def node(self, rev):
479 """filtered version of revlog.node"""
489 """filtered version of revlog.node"""
480 if rev in self.filteredrevs:
490 if rev in self.filteredrevs:
481 raise error.FilteredIndexError(rev)
491 raise error.FilteredIndexError(rev)
482 return super(changelog, self).node(rev)
492 return super(changelog, self).node(rev)
483
493
484 def linkrev(self, rev):
494 def linkrev(self, rev):
485 """filtered version of revlog.linkrev"""
495 """filtered version of revlog.linkrev"""
486 if rev in self.filteredrevs:
496 if rev in self.filteredrevs:
487 raise error.FilteredIndexError(rev)
497 raise error.FilteredIndexError(rev)
488 return super(changelog, self).linkrev(rev)
498 return super(changelog, self).linkrev(rev)
489
499
490 def parentrevs(self, rev):
500 def parentrevs(self, rev):
491 """filtered version of revlog.parentrevs"""
501 """filtered version of revlog.parentrevs"""
492 if rev in self.filteredrevs:
502 if rev in self.filteredrevs:
493 raise error.FilteredIndexError(rev)
503 raise error.FilteredIndexError(rev)
494 return super(changelog, self).parentrevs(rev)
504 return super(changelog, self).parentrevs(rev)
495
505
496 def flags(self, rev):
506 def flags(self, rev):
497 """filtered version of revlog.flags"""
507 """filtered version of revlog.flags"""
498 if rev in self.filteredrevs:
508 if rev in self.filteredrevs:
499 raise error.FilteredIndexError(rev)
509 raise error.FilteredIndexError(rev)
500 return super(changelog, self).flags(rev)
510 return super(changelog, self).flags(rev)
501
511
502 def delayupdate(self, tr):
512 def delayupdate(self, tr):
503 b"delay visibility of index updates to other readers"
513 b"delay visibility of index updates to other readers"
504
514
505 if not self._delayed:
515 if not self._delayed:
506 if len(self) == 0:
516 if len(self) == 0:
507 self._divert = True
517 self._divert = True
508 if self._realopener.exists(self.indexfile + b'.a'):
518 if self._realopener.exists(self.indexfile + b'.a'):
509 self._realopener.unlink(self.indexfile + b'.a')
519 self._realopener.unlink(self.indexfile + b'.a')
510 self.opener = _divertopener(self._realopener, self.indexfile)
520 self.opener = _divertopener(self._realopener, self.indexfile)
511 else:
521 else:
512 self._delaybuf = []
522 self._delaybuf = []
513 self.opener = _delayopener(
523 self.opener = _delayopener(
514 self._realopener, self.indexfile, self._delaybuf
524 self._realopener, self.indexfile, self._delaybuf
515 )
525 )
516 self._delayed = True
526 self._delayed = True
517 tr.addpending(b'cl-%i' % id(self), self._writepending)
527 tr.addpending(b'cl-%i' % id(self), self._writepending)
518 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
528 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
519
529
520 def _finalize(self, tr):
530 def _finalize(self, tr):
521 b"finalize index updates"
531 b"finalize index updates"
522 self._delayed = False
532 self._delayed = False
523 self.opener = self._realopener
533 self.opener = self._realopener
524 # move redirected index data back into place
534 # move redirected index data back into place
525 if self._divert:
535 if self._divert:
526 assert not self._delaybuf
536 assert not self._delaybuf
527 tmpname = self.indexfile + b".a"
537 tmpname = self.indexfile + b".a"
528 nfile = self.opener.open(tmpname)
538 nfile = self.opener.open(tmpname)
529 nfile.close()
539 nfile.close()
530 self.opener.rename(tmpname, self.indexfile, checkambig=True)
540 self.opener.rename(tmpname, self.indexfile, checkambig=True)
531 elif self._delaybuf:
541 elif self._delaybuf:
532 fp = self.opener(self.indexfile, b'a', checkambig=True)
542 fp = self.opener(self.indexfile, b'a', checkambig=True)
533 fp.write(b"".join(self._delaybuf))
543 fp.write(b"".join(self._delaybuf))
534 fp.close()
544 fp.close()
535 self._delaybuf = None
545 self._delaybuf = None
536 self._divert = False
546 self._divert = False
537 # split when we're done
547 # split when we're done
538 self._enforceinlinesize(tr)
548 self._enforceinlinesize(tr)
539
549
540 def _writepending(self, tr):
550 def _writepending(self, tr):
541 b"create a file containing the unfinalized state for pretxnchangegroup"
551 b"create a file containing the unfinalized state for pretxnchangegroup"
542 if self._delaybuf:
552 if self._delaybuf:
543 # make a temporary copy of the index
553 # make a temporary copy of the index
544 fp1 = self._realopener(self.indexfile)
554 fp1 = self._realopener(self.indexfile)
545 pendingfilename = self.indexfile + b".a"
555 pendingfilename = self.indexfile + b".a"
546 # register as a temp file to ensure cleanup on failure
556 # register as a temp file to ensure cleanup on failure
547 tr.registertmp(pendingfilename)
557 tr.registertmp(pendingfilename)
548 # write existing data
558 # write existing data
549 fp2 = self._realopener(pendingfilename, b"w")
559 fp2 = self._realopener(pendingfilename, b"w")
550 fp2.write(fp1.read())
560 fp2.write(fp1.read())
551 # add pending data
561 # add pending data
552 fp2.write(b"".join(self._delaybuf))
562 fp2.write(b"".join(self._delaybuf))
553 fp2.close()
563 fp2.close()
554 # switch modes so finalize can simply rename
564 # switch modes so finalize can simply rename
555 self._delaybuf = None
565 self._delaybuf = None
556 self._divert = True
566 self._divert = True
557 self.opener = _divertopener(self._realopener, self.indexfile)
567 self.opener = _divertopener(self._realopener, self.indexfile)
558
568
559 if self._divert:
569 if self._divert:
560 return True
570 return True
561
571
562 return False
572 return False
563
573
564 def _enforceinlinesize(self, tr, fp=None):
574 def _enforceinlinesize(self, tr, fp=None):
565 if not self._delayed:
575 if not self._delayed:
566 revlog.revlog._enforceinlinesize(self, tr, fp)
576 revlog.revlog._enforceinlinesize(self, tr, fp)
567
577
568 def read(self, node):
578 def read(self, node):
569 """Obtain data from a parsed changelog revision.
579 """Obtain data from a parsed changelog revision.
570
580
571 Returns a 6-tuple of:
581 Returns a 6-tuple of:
572
582
573 - manifest node in binary
583 - manifest node in binary
574 - author/user as a localstr
584 - author/user as a localstr
575 - date as a 2-tuple of (time, timezone)
585 - date as a 2-tuple of (time, timezone)
576 - list of files
586 - list of files
577 - commit message as a localstr
587 - commit message as a localstr
578 - dict of extra metadata
588 - dict of extra metadata
579
589
580 Unless you need to access all fields, consider calling
590 Unless you need to access all fields, consider calling
581 ``changelogrevision`` instead, as it is faster for partial object
591 ``changelogrevision`` instead, as it is faster for partial object
582 access.
592 access.
583 """
593 """
584 c = changelogrevision(*self._revisiondata(node))
594 d, s = self._revisiondata(node)
595 c = changelogrevision(
596 d, s, self._copiesstorage == b'changeset-sidedata'
597 )
585 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
598 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
586
599
587 def changelogrevision(self, nodeorrev):
600 def changelogrevision(self, nodeorrev):
588 """Obtain a ``changelogrevision`` for a node or revision."""
601 """Obtain a ``changelogrevision`` for a node or revision."""
589 text, sidedata = self._revisiondata(nodeorrev)
602 text, sidedata = self._revisiondata(nodeorrev)
590 return changelogrevision(text, sidedata)
603 return changelogrevision(
604 text, sidedata, self._copiesstorage == b'changeset-sidedata'
605 )
591
606
592 def readfiles(self, node):
607 def readfiles(self, node):
593 """
608 """
594 short version of read that only returns the files modified by the cset
609 short version of read that only returns the files modified by the cset
595 """
610 """
596 text = self.revision(node)
611 text = self.revision(node)
597 if not text:
612 if not text:
598 return []
613 return []
599 last = text.index(b"\n\n")
614 last = text.index(b"\n\n")
600 l = text[:last].split(b'\n')
615 l = text[:last].split(b'\n')
601 return l[3:]
616 return l[3:]
602
617
603 def add(
618 def add(
604 self,
619 self,
605 manifest,
620 manifest,
606 files,
621 files,
607 desc,
622 desc,
608 transaction,
623 transaction,
609 p1,
624 p1,
610 p2,
625 p2,
611 user,
626 user,
612 date=None,
627 date=None,
613 extra=None,
628 extra=None,
614 p1copies=None,
629 p1copies=None,
615 p2copies=None,
630 p2copies=None,
616 filesadded=None,
631 filesadded=None,
617 filesremoved=None,
632 filesremoved=None,
618 ):
633 ):
619 # Convert to UTF-8 encoded bytestrings as the very first
634 # Convert to UTF-8 encoded bytestrings as the very first
620 # thing: calling any method on a localstr object will turn it
635 # thing: calling any method on a localstr object will turn it
621 # into a str object and the cached UTF-8 string is thus lost.
636 # into a str object and the cached UTF-8 string is thus lost.
622 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
637 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
623
638
624 user = user.strip()
639 user = user.strip()
625 # An empty username or a username with a "\n" will make the
640 # An empty username or a username with a "\n" will make the
626 # revision text contain two "\n\n" sequences -> corrupt
641 # revision text contain two "\n\n" sequences -> corrupt
627 # repository since read cannot unpack the revision.
642 # repository since read cannot unpack the revision.
628 if not user:
643 if not user:
629 raise error.StorageError(_(b"empty username"))
644 raise error.StorageError(_(b"empty username"))
630 if b"\n" in user:
645 if b"\n" in user:
631 raise error.StorageError(
646 raise error.StorageError(
632 _(b"username %r contains a newline") % pycompat.bytestr(user)
647 _(b"username %r contains a newline") % pycompat.bytestr(user)
633 )
648 )
634
649
635 desc = stripdesc(desc)
650 desc = stripdesc(desc)
636
651
637 if date:
652 if date:
638 parseddate = b"%d %d" % dateutil.parsedate(date)
653 parseddate = b"%d %d" % dateutil.parsedate(date)
639 else:
654 else:
640 parseddate = b"%d %d" % dateutil.makedate()
655 parseddate = b"%d %d" % dateutil.makedate()
641 if extra:
656 if extra:
642 branch = extra.get(b"branch")
657 branch = extra.get(b"branch")
643 if branch in (b"default", b""):
658 if branch in (b"default", b""):
644 del extra[b"branch"]
659 del extra[b"branch"]
645 elif branch in (b".", b"null", b"tip"):
660 elif branch in (b".", b"null", b"tip"):
646 raise error.StorageError(
661 raise error.StorageError(
647 _(b'the name \'%s\' is reserved') % branch
662 _(b'the name \'%s\' is reserved') % branch
648 )
663 )
649 sortedfiles = sorted(files)
664 sortedfiles = sorted(files)
650 sidedata = None
665 sidedata = None
651 if extra is not None:
666 if extra is not None:
652 for name in (
667 for name in (
653 b'p1copies',
668 b'p1copies',
654 b'p2copies',
669 b'p2copies',
655 b'filesadded',
670 b'filesadded',
656 b'filesremoved',
671 b'filesremoved',
657 ):
672 ):
658 extra.pop(name, None)
673 extra.pop(name, None)
659 if p1copies is not None:
674 if p1copies is not None:
660 p1copies = copies.encodecopies(sortedfiles, p1copies)
675 p1copies = copies.encodecopies(sortedfiles, p1copies)
661 if p2copies is not None:
676 if p2copies is not None:
662 p2copies = copies.encodecopies(sortedfiles, p2copies)
677 p2copies = copies.encodecopies(sortedfiles, p2copies)
663 if filesadded is not None:
678 if filesadded is not None:
664 filesadded = copies.encodefileindices(sortedfiles, filesadded)
679 filesadded = copies.encodefileindices(sortedfiles, filesadded)
665 if filesremoved is not None:
680 if filesremoved is not None:
666 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
681 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
667 if self._copiesstorage == b'extra':
682 if self._copiesstorage == b'extra':
668 extrasentries = p1copies, p2copies, filesadded, filesremoved
683 extrasentries = p1copies, p2copies, filesadded, filesremoved
669 if extra is None and any(x is not None for x in extrasentries):
684 if extra is None and any(x is not None for x in extrasentries):
670 extra = {}
685 extra = {}
671 if p1copies is not None:
686 if p1copies is not None:
672 extra[b'p1copies'] = p1copies
687 extra[b'p1copies'] = p1copies
673 if p2copies is not None:
688 if p2copies is not None:
674 extra[b'p2copies'] = p2copies
689 extra[b'p2copies'] = p2copies
675 if filesadded is not None:
690 if filesadded is not None:
676 extra[b'filesadded'] = filesadded
691 extra[b'filesadded'] = filesadded
677 if filesremoved is not None:
692 if filesremoved is not None:
678 extra[b'filesremoved'] = filesremoved
693 extra[b'filesremoved'] = filesremoved
679 elif self._copiesstorage == b'changeset-sidedata':
694 elif self._copiesstorage == b'changeset-sidedata':
680 sidedata = {}
695 sidedata = {}
681 if p1copies is not None:
696 if p1copies is not None:
682 sidedata[sidedatamod.SD_P1COPIES] = p1copies
697 sidedata[sidedatamod.SD_P1COPIES] = p1copies
683 if p2copies is not None:
698 if p2copies is not None:
684 sidedata[sidedatamod.SD_P2COPIES] = p2copies
699 sidedata[sidedatamod.SD_P2COPIES] = p2copies
685 if filesadded is not None:
700 if filesadded is not None:
686 sidedata[sidedatamod.SD_FILESADDED] = filesadded
701 sidedata[sidedatamod.SD_FILESADDED] = filesadded
687 if filesremoved is not None:
702 if filesremoved is not None:
688 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
703 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
689
704
690 if extra:
705 if extra:
691 extra = encodeextra(extra)
706 extra = encodeextra(extra)
692 parseddate = b"%s %s" % (parseddate, extra)
707 parseddate = b"%s %s" % (parseddate, extra)
693 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
708 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
694 text = b"\n".join(l)
709 text = b"\n".join(l)
695 return self.addrevision(
710 return self.addrevision(
696 text, transaction, len(self), p1, p2, sidedata=sidedata
711 text, transaction, len(self), p1, p2, sidedata=sidedata
697 )
712 )
698
713
699 def branchinfo(self, rev):
714 def branchinfo(self, rev):
700 """return the branch name and open/close state of a revision
715 """return the branch name and open/close state of a revision
701
716
702 This function exists because creating a changectx object
717 This function exists because creating a changectx object
703 just to access this is costly."""
718 just to access this is costly."""
704 extra = self.read(rev)[5]
719 extra = self.read(rev)[5]
705 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
720 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
706
721
707 def _nodeduplicatecallback(self, transaction, node):
722 def _nodeduplicatecallback(self, transaction, node):
708 # keep track of revisions that got "re-added", eg: unbunde of know rev.
723 # keep track of revisions that got "re-added", eg: unbunde of know rev.
709 #
724 #
710 # We track them in a list to preserve their order from the source bundle
725 # We track them in a list to preserve their order from the source bundle
711 duplicates = transaction.changes.setdefault(b'revduplicates', [])
726 duplicates = transaction.changes.setdefault(b'revduplicates', [])
712 duplicates.append(self.rev(node))
727 duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now