##// END OF EJS Templates
changing-files: retrieve changelogrevision.files from the sidedata block...
marmoute -
r46213:053c9014 default
parent child Browse files
Show More
@@ -1,606 +1,608 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 metadata,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 _defaultextra = {b'branch': b'default'}
30 _defaultextra = {b'branch': b'default'}
31
31
32
32
33 def _string_escape(text):
33 def _string_escape(text):
34 """
34 """
35 >>> from .pycompat import bytechr as chr
35 >>> from .pycompat import bytechr as chr
36 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
36 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
37 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
37 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
38 >>> s
38 >>> s
39 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
39 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
40 >>> res = _string_escape(s)
40 >>> res = _string_escape(s)
41 >>> s == _string_unescape(res)
41 >>> s == _string_unescape(res)
42 True
42 True
43 """
43 """
44 # subset of the string_escape codec
44 # subset of the string_escape codec
45 text = (
45 text = (
46 text.replace(b'\\', b'\\\\')
46 text.replace(b'\\', b'\\\\')
47 .replace(b'\n', b'\\n')
47 .replace(b'\n', b'\\n')
48 .replace(b'\r', b'\\r')
48 .replace(b'\r', b'\\r')
49 )
49 )
50 return text.replace(b'\0', b'\\0')
50 return text.replace(b'\0', b'\\0')
51
51
52
52
def _string_unescape(text):
    """Reverse ``_string_escape``.

    ``\\0`` needs special handling before handing off to the generic
    unescaper: a temporary newline is inserted after every escaped
    backslash so the ``\\0`` substitution cannot match the tail of an
    escaped-backslash-plus-zero sequence.
    """
    if b'\\0' in text:
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
60
60
61
61
def decodeextra(text):
    """Decode an encoded 'extra' blob into a dict.

    The blob is a NUL-separated list of escaped ``key:value`` entries;
    each entry is unescaped and then split on its first ``:``.  Keys not
    present in the blob keep their value from ``_defaultextra``.
    """
    extra = _defaultextra.copy()
    for entry in text.split(b'\0'):
        if not entry:
            continue
        key, value = _string_unescape(entry).split(b':', 1)
        extra[key] = value
    return extra
79
79
80
80
def encodeextra(d):
    """Encode an extra dict as NUL-separated, escaped ``key:value`` pairs.

    Keys are emitted in sorted order so that the resulting changelog
    entry is deterministic.
    """
    pieces = []
    for key in sorted(d):
        pieces.append(_string_escape(b'%s:%s' % (key, d[key])))
    return b"\0".join(pieces)
85
85
86
86
def stripdesc(desc):
    """Strip trailing whitespace from each line of *desc*, then drop any
    leading and trailing empty lines."""
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
90
90
91
91
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # 'buf' collects the delayed writes; bytes below 'size' still live
        # in the real file, bytes past it exist only in 'buf'
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # current virtual position, and the size of the on-disk portion
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus everything buffered so far
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are deliberately buffered; nothing to flush to disk
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only positions inside the on-disk portion need a real seek
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # start by reading from the on-disk portion
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # continue out of the write buffer; collapse the buffered
            # chunks into a single entry first so it can be sliced
            # NOTE(review): with count == -1 the slice below ends at
            # doff - 1 and would drop the final buffered byte — callers
            # appear to pass a positive count when reading past the
            # on-disk data; verify before relying on count=-1 here
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # writes never touch the real file; they only grow the buffer and
        # the virtual end-of-file marker
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
160
160
161
161
162 class _divertopener(object):
162 class _divertopener(object):
163 def __init__(self, opener, target):
163 def __init__(self, opener, target):
164 self._opener = opener
164 self._opener = opener
165 self._target = target
165 self._target = target
166
166
167 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
167 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
168 if name != self._target:
168 if name != self._target:
169 return self._opener(name, mode, **kwargs)
169 return self._opener(name, mode, **kwargs)
170 return self._opener(name + b".a", mode, **kwargs)
170 return self._opener(name + b".a", mode, **kwargs)
171
171
172 def __getattr__(self, attr):
172 def __getattr__(self, attr):
173 return getattr(self._opener, attr)
173 return getattr(self._opener, attr)
174
174
175
175
176 def _delayopener(opener, target, buf):
176 def _delayopener(opener, target, buf):
177 """build an opener that stores chunks in 'buf' instead of 'target'"""
177 """build an opener that stores chunks in 'buf' instead of 'target'"""
178
178
179 def _delay(name, mode=b'r', checkambig=False, **kwargs):
179 def _delay(name, mode=b'r', checkambig=False, **kwargs):
180 if name != target:
180 if name != target:
181 return opener(name, mode, **kwargs)
181 return opener(name, mode, **kwargs)
182 assert not kwargs
182 assert not kwargs
183 return appender(opener, name, mode, buf)
183 return appender(opener, name, mode, buf)
184
184
185 return _delay
185 return _delay
186
186
187
187
@attr.s
class _changelogrevision(object):
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node the revision points at
    manifest = attr.ib(default=nullid)
    # committer, as bytes
    user = attr.ib(default=b'')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    # files touched by the changeset
    files = attr.ib(default=attr.Factory(list))
    # file-change / copy metadata; None means "not recorded"
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    # commit message
    description = attr.ib(default=b'')
202
202
203
203
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.

    Fields are decoded lazily from ``text`` (the raw changelog entry)
    via properties; when ``cpsd`` is true, file-change information comes
    from the ``sidedata`` block instead of the entry text/extras.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
        '_changes',
    )

    def __new__(cls, text, sidedata, cpsd):
        # an empty entry decodes to an all-defaults record
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # offsets of the first three newlines delimit manifest, user and
        # the date/extra line
        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd
        # lazily-built ChangingFiles cache, see the ``changes`` property
        self._changes = None

        return self

    @property
    def manifest(self):
        # first line: manifest node, hex-encoded in the raw entry
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        # second line: committer name, converted to the local encoding
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the third line: time and tz
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # optional third field of the third line: the encoded extra blob
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def changes(self):
        """file-change information as a ChangingFiles object (cached)"""
        if self._changes is not None:
            return self._changes
        if self._cpsd:
            # copy metadata lives in the sidedata block
            changes = metadata.decode_files_sidedata(self._sidedata)
        else:
            # rebuild the equivalent object from entry text and extras
            changes = metadata.ChangingFiles(
                touched=self.files or (),
                added=self.filesadded or (),
                removed=self.filesremoved or (),
                p1_copies=self.p1copies or {},
                p2_copies=self.p2copies or {},
            )
        self._changes = changes
        return changes

    @property
    def files(self):
        # with sidedata storage, the file list comes from the sidedata
        # block; otherwise it is parsed from the entry text
        if self._cpsd:
            return sorted(self.changes.touched)
        off = self._offsets
        if off[2] == off[3]:
            # empty file list: third newline starts the double newline
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            return self.changes.added
        else:
            rawindices = self.extra.get(b'filesadded')
            if rawindices is None:
                # None (not recorded) is distinct from an empty set
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            return self.changes.removed
        else:
            rawindices = self.extra.get(b'filesremoved')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            return self.changes.copied_from_p1
        else:
            rawcopies = self.extra.get(b'p1copies')
            if rawcopies is None:
                return None
            return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            return self.changes.copied_from_p2
        else:
            rawcopies = self.extra.get(b'p2copies')
            if rawcopies is None:
                return None
            return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # everything past the double newline, in the local encoding
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
371
373
372
374
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state for the delayed-write machinery (see delayupdate)
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        # any cached hashes are invalid once the filtered set changes
        self._filteredrevs_hashcache = {}

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert everything to the '.a' file and
                # rename it into place at finalization
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer new writes in memory
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered writes to the real index
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # report whether a pending '.a' file exists for hook readers
        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while delayed, defer the inline-size check until finalization
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        # the file list is everything after the first three lines
        # (manifest, user, date) and before the double newline
        last = text.index(b"\n\n")
        l = text[:last].split(b'\n')
        return l[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is implicit; don't store it
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        # assemble the raw entry; the empty string produces the double
        # newline separating the file list from the description
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        return self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata
        )

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        extra = self.read(rev)[5]
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

    def _nodeduplicatecallback(self, transaction, node):
        # keep track of revisions that got "re-added", eg: unbundle of a
        # known rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now