##// END OF EJS Templates
branchmap: micro-optimize branchinfo...
Joerg Sonnenberger -
r46813:2607a934 default
parent child Browse files
Show More
@@ -1,612 +1,612 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 metadata,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29 from .revlogutils import flagutil
29 from .revlogutils import flagutil
30
30
31 _defaultextra = {b'branch': b'default'}
31 _defaultextra = {b'branch': b'default'}
32
32
33
33
34 def _string_escape(text):
34 def _string_escape(text):
35 """
35 """
36 >>> from .pycompat import bytechr as chr
36 >>> from .pycompat import bytechr as chr
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s
39 >>> s
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 >>> res = _string_escape(s)
41 >>> res = _string_escape(s)
42 >>> s == _string_unescape(res)
42 >>> s == _string_unescape(res)
43 True
43 True
44 """
44 """
45 # subset of the string_escape codec
45 # subset of the string_escape codec
46 text = (
46 text = (
47 text.replace(b'\\', b'\\\\')
47 text.replace(b'\\', b'\\\\')
48 .replace(b'\n', b'\\n')
48 .replace(b'\n', b'\\n')
49 .replace(b'\r', b'\\r')
49 .replace(b'\r', b'\\r')
50 )
50 )
51 return text.replace(b'\0', b'\\0')
51 return text.replace(b'\0', b'\\0')
52
52
53
53
def _string_unescape(text):
    """Reverse ``_string_escape``, decoding ``text`` back to raw bytes."""
    if b'\\0' in text:
        # Handle the NUL escape ourselves: tag every escaped backslash
        # pair with a sentinel newline so that the ``0`` following an
        # escaped backslash is not mistaken for a ``\0`` escape, then
        # strip the sentinels again.
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
61
61
62
62
def decodeextra(text):
    """Decode a NUL-separated, escaped extra blob into a dict.

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # Start from a copy of the defaults so absent keys (e.g. 'branch')
    # still resolve.
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
80
80
81
81
def encodeextra(d):
    """Serialize the extra dict ``d`` into its NUL-separated wire form."""
    # Keys must be sorted to produce a deterministic changelog entry.
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, d[key])) for key in sorted(d)
    )
86
86
87
87
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = (line.rstrip() for line in desc.splitlines())
    return b'\n'.join(cleaned).strip(b'\n')
91
91
92
92
class appender(object):
    """File-like wrapper that buffers appended data in memory.

    The changelog index must be updated last on disk, so writes are
    accumulated in ``self.data`` (to be flushed by the caller later)
    while reads transparently span the real on-disk file and the
    in-memory buffer.
    """

    def __init__(self, vfs, name, mode, buf):
        # buf: list of pending (not yet written) byte chunks
        self.data = buf
        fileobj = vfs(name, mode)
        self.fp = fileobj
        self.offset = fileobj.tell()
        # size of the real file; offsets past it live in self.data
        self.size = vfs.fstat(fileobj).st_size
        self._end = self.size

    def end(self):
        """Return the virtual end offset (file size plus buffered data)."""
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # Writes are deliberately held in memory; nothing to flush.
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # still within the on-disk portion: position the real file
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        result = b""
        if self.offset < self.size:
            # serve the on-disk part of the request first
            chunk = self.fp.read(count)
            result = chunk
            self.offset += len(chunk)
            if count > 0:
                count -= len(chunk)
        if count != 0:
            # remainder comes from the in-memory buffer; collapse the
            # buffered chunks into a single bytes object first
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            chunk = self.data[0][doff : doff + count]
            self.offset += len(chunk)
            result += chunk
        return result

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
161
161
162
162
163 class _divertopener(object):
163 class _divertopener(object):
164 def __init__(self, opener, target):
164 def __init__(self, opener, target):
165 self._opener = opener
165 self._opener = opener
166 self._target = target
166 self._target = target
167
167
168 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
168 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
169 if name != self._target:
169 if name != self._target:
170 return self._opener(name, mode, **kwargs)
170 return self._opener(name, mode, **kwargs)
171 return self._opener(name + b".a", mode, **kwargs)
171 return self._opener(name + b".a", mode, **kwargs)
172
172
173 def __getattr__(self, attr):
173 def __getattr__(self, attr):
174 return getattr(self._opener, attr)
174 return getattr(self._opener, attr)
175
175
176
176
177 def _delayopener(opener, target, buf):
177 def _delayopener(opener, target, buf):
178 """build an opener that stores chunks in 'buf' instead of 'target'"""
178 """build an opener that stores chunks in 'buf' instead of 'target'"""
179
179
180 def _delay(name, mode=b'r', checkambig=False, **kwargs):
180 def _delay(name, mode=b'r', checkambig=False, **kwargs):
181 if name != target:
181 if name != target:
182 return opener(name, mode, **kwargs)
182 return opener(name, mode, **kwargs)
183 assert not kwargs
183 assert not kwargs
184 return appender(opener, name, mode, buf)
184 return appender(opener, name, mode, buf)
185
185
186 return _delay
186 return _delay
187
187
188
188
@attr.s
class _changelogrevision(object):
    """Plain-data record mirroring ``changelogrevision``'s attributes.

    Returned for an empty/absent changelog entry so callers can use the
    two types interchangeably.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
203
203
204
204
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a lazy view
    into the parsed entry: expensive fields are only decoded when the
    corresponding property is accessed.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
        '_changes',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # No revision text: hand back a default, all-empty record.
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty, in which case nl3 is the first
        # of the double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # whether copy metadata is stored in changeset sidedata
        self._cpsd = cpsd
        # memoized result of the ``changes`` property
        self._changes = None

        return self

    @property
    def manifest(self):
        # first line of the entry is the hex manifest node
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        nl1, nl2 = self._offsets[0], self._offsets[1]
        return encoding.tolocal(self._text[nl1 + 1 : nl2])

    @property
    def _rawdate(self):
        nl2, nl3 = self._offsets[1], self._offsets[2]
        dateextra = self._text[nl2 + 1 : nl3]
        # first two space-separated fields: time and timezone
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        nl2, nl3 = self._offsets[1], self._offsets[2]
        fields = self._text[nl2 + 1 : nl3].split(b' ', 2)
        if len(fields) != 3:
            # no third field: this entry carries no encoded extra
            return None
        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        when = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            tz = int(raw[1])
        except ValueError:
            tz = 0
        return when, tz

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra
        return decodeextra(raw)

    @property
    def changes(self):
        if self._changes is not None:
            return self._changes
        if self._cpsd:
            changes = metadata.decode_files_sidedata(self._sidedata)
        else:
            changes = metadata.ChangingFiles(
                touched=self.files or (),
                added=self.filesadded or (),
                removed=self.filesremoved or (),
                p1_copies=self.p1copies or {},
                p2_copies=self.p2copies or {},
            )
        self._changes = changes
        return changes

    @property
    def files(self):
        if self._cpsd:
            return sorted(self.changes.touched)
        off = self._offsets
        if off[2] == off[3]:
            # nl3 == doublenl means the file list is empty
            return []
        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            return self.changes.added
        rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            return self.changes.removed
        rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            return self.changes.copied_from_p1
        rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            return self.changes.copied_from_p2
        rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # skip the double newline that terminates the file list
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
374
374
375
375
class changelog(revlog.revlog):
    """Revlog subclass storing the changelog (one entry per changeset)."""

    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.
            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        # any cached hash of the old filtered set is now stale
        self._filteredrevs_hashcache = {}

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""
        if not self._delayed:
            if len(self) == 0:
                # brand new changelog: divert writes into a ".a" file
                # that _finalize can simply rename into place
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # existing changelog: buffer appended data in memory
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
        self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while updates are delayed the index must not be split; the
        # check is re-run from _finalize once writes are flushed
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

        - manifest node in binary
        - author/user as a localstr
        - date as a 2-tuple of (time, timezone)
        - list of files
        - commit message as a localstr
        - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        last = text.index(b"\n\n")
        # the first three lines are manifest, user and date; the rest of
        # the header is the file list
        return text[:last].split(b'\n')[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is implicit; never store it
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        flags = 0
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            if files.has_copies_info:
                flags |= flagutil.REVIDX_HASCOPIESINFO
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        lines = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(lines)
        return self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags
        )

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        # changelogrevision decodes only the fields we touch; going
        # through read() would parse the full 6-tuple for nothing
        extra = self.changelogrevision(rev).extra
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

    def _nodeduplicatecallback(self, transaction, node):
        # keep track of revisions that got "re-added", eg: unbunde of know rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now