##// END OF EJS Templates
sidedata: simply read removed files from the `ChangingFiles` object
marmoute -
r46147:f9a67afc default
parent child Browse files
Show More
@@ -1,614 +1,612 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 metadata,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
# Default 'extra' mapping for a changeset: unless a revision says
# otherwise it lives on the b'default' branch (see decodeextra, which
# copies this dict before applying per-revision overrides).
_defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse _string_escape(): decode escaped bytes back to raw form."""
    if b'\\0' in text:
        # Tag every literal backslash pair with a sentinel newline so the
        # b'\\0' substitution below cannot match inside an escaped
        # backslash, then strip the sentinels again.  (A real newline can
        # only appear here via this tagging: escaped text has no raw \n.)
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """Decode an encoded 'extra' blob into a dict on top of the defaults.

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # start from a copy so extensions mutating _defaultextra still work
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Encode an 'extra' dict into a NUL-separated, escaped bytestring."""
    # keys must be sorted to produce a deterministic changelog entry
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, d[key])) for key in sorted(d)
    )
87
87
88
88
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
92
92
93
93
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # buf: external list that accumulates delayed writes; shared with
        # the caller (see _delayopener), which flushes it later.
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # virtual offset spans the real file followed by the buffered data
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end: on-disk size plus everything buffered via write()
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # intentionally a no-op: buffered data must not reach disk yet
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        # only reposition the real file when the target lies inside it;
        # offsets past self.size fall into the in-memory buffer
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # first satisfy as much as possible from the real file
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # remainder (or count < 0 meaning "read all") comes from the
            # buffer; coalesce it into a single chunk so slicing works
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # writes never touch the real file: they only grow the buffer
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
162
162
163
163
164 class _divertopener(object):
164 class _divertopener(object):
165 def __init__(self, opener, target):
165 def __init__(self, opener, target):
166 self._opener = opener
166 self._opener = opener
167 self._target = target
167 self._target = target
168
168
169 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
169 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
170 if name != self._target:
170 if name != self._target:
171 return self._opener(name, mode, **kwargs)
171 return self._opener(name, mode, **kwargs)
172 return self._opener(name + b".a", mode, **kwargs)
172 return self._opener(name + b".a", mode, **kwargs)
173
173
174 def __getattr__(self, attr):
174 def __getattr__(self, attr):
175 return getattr(self._opener, attr)
175 return getattr(self._opener, attr)
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
182 if name != target:
182 if name != target:
183 return opener(name, mode, **kwargs)
183 return opener(name, mode, **kwargs)
184 assert not kwargs
184 assert not kwargs
185 return appender(opener, name, mode, buf)
185 return appender(opener, name, mode, buf)
186
186
187 return _delay
187 return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    # Plain value object with the same attribute API as changelogrevision;
    # changelogrevision.__new__ returns one of these (with the defaults
    # below) when asked to parse empty text, i.e. the null revision.
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        '_offsets',   # (nl1, nl2, nl3, doublenl) indices into _text
        '_text',      # raw changelog entry
        '_sidedata',  # sidedata mapping for this revision
        '_cpsd',      # True when copies are stored in changeset sidedata
        '_changes',   # lazily-built metadata.ChangingFiles cache
    )

    def __new__(cls, text, sidedata, cpsd):
        # empty text means the null revision: return the plain value
        # object with default attributes instead of parsing
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd
        self._changes = None

        return self

    @property
    def manifest(self):
        # first line of the entry is the manifest node, in hex
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        # second line is the user, converted back to the local encoding
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # "time tz" prefix of the third line, as two raw byte fields
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # optional third field of the date line: the encoded extra blob,
        # or None when the entry has no extra (e.g. changelog v0)
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        """(time, timezone) tuple; time is a float, timezone an int."""
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def changes(self):
        """metadata.ChangingFiles describing this revision's file changes.

        Built once and cached; sourced from sidedata when copies are
        stored there (_cpsd), otherwise reassembled from the entry and
        its extra fields."""
        if self._changes is not None:
            return self._changes
        if self._cpsd:
            changes = metadata.decode_files_sidedata(self, self._sidedata)
        else:
            changes = metadata.ChangingFiles(
                touched=self.files or (),
                added=self.filesadded or (),
                removed=self.filesremoved or (),
                p1_copies=self.p1copies or {},
                p2_copies=self.p2copies or {},
            )
        self._changes = changes
        return changes

    @property
    def files(self):
        # file list sits between the third newline and the double newline;
        # an empty span means the changeset touched no files
        off = self._offsets
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            # sidedata path: simply read from the ChangingFiles object
            return self.changes.added
        else:
            rawindices = self.extra.get(b'filesadded')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            # sidedata path: simply read from the ChangingFiles object
            return self.changes.removed
        else:
            rawindices = self.extra.get(b'filesremoved')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # everything after the double newline is the commit message
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
379
377
380
378
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state for delayupdate()/_writepending()/_finalize(): either
        # divert writes to <indexfile>.a or buffer them in _delaybuf
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        # frozenset of revision numbers hidden from this changelog view
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        # any cached hash of the filtered set is now stale
        self._filteredrevs_hashcache = {}

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: cheapest is to divert all writes to a
                # separate .a file and rename it into place at finalize
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # existing data: buffer new writes in memory instead
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while delayed, the index must not be split out of line
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        # files are lines 4..N of the header (after node, user, date)
        last = text.index(b"\n\n")
        l = text[:last].split(b'\n')
        return l[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        """Build and store a new changelog entry.

        ``files`` is a metadata.ChangingFiles object; returns the result
        of ``addrevision`` for the new entry."""
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is implicit; don't store it
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        return self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata
        )

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        extra = self.read(rev)[5]
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

    def _nodeduplicatecallback(self, transaction, node):
        # keep track of revisions that got "re-added", eg: unbunde of know rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now