##// END OF EJS Templates
sidedata: simply read added files from the `ChangingFiles` object
marmoute -
r46146:48c93a0b default
parent child Browse files
Show More
@@ -1,616 +1,614
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 metadata,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
32 _defaultextra = {b'branch': b'default'}
32 _defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse ``_string_escape``, restoring NUL bytes first.

    ``\\0`` cannot be handed straight to the generic unescaper because it
    would be confused with an escaped backslash followed by a literal
    ``0`` (``\\\\0``).  Escaped backslashes are therefore temporarily
    tagged with a newline -- a byte that cannot appear in escaped text --
    and the tag is stripped once the NULs are back in place.
    """
    if b'\\0' in text:
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """Parse an encoded extra-metadata blob back into a dict.

    The inverse of ``encodeextra``: the blob is a NUL-separated sequence
    of escaped ``key:value`` entries.  Decoded entries are layered on top
    of a copy of ``_defaultextra``, so the result always carries a
    ``branch`` key.
    """
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Encode an extra-metadata dict as a NUL-separated escaped blob."""
    # keys must be sorted to produce a deterministic changelog entry
    escaped_items = []
    for key in sorted(d):
        escaped_items.append(_string_escape(b'%s:%s' % (key, d[key])))
    return b"\0".join(escaped_items)
87
87
88
88
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = []
    for line in desc.splitlines():
        cleaned.append(line.rstrip())
    return b'\n'.join(cleaned).strip(b'\n')
92
92
93
93
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # buffered, not-yet-written chunks; the list is shared with the
        # caller so pending data stays visible to whoever created us
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # virtual position: may point past the real on-disk data and into
        # the buffered chunks
        self.offset = fp.tell()
        # size of the real file on disk; everything beyond it lives in buf
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus all buffered writes
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are only buffered in self.data; nothing to flush to disk
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # position falls inside the real file; move the real fp too
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # serve the on-disk part of the read first
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # remainder (or the whole read) comes from the buffered data;
            # coalesce the chunks so a plain slice can serve it
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            # NOTE(review): when count is still negative here the slice is
            # [doff : doff - 1], which drops the final buffered byte --
            # callers appear to always pass explicit counts; confirm before
            # relying on read(-1) spilling into the buffer.
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
162
162
163
163
164 class _divertopener(object):
164 class _divertopener(object):
165 def __init__(self, opener, target):
165 def __init__(self, opener, target):
166 self._opener = opener
166 self._opener = opener
167 self._target = target
167 self._target = target
168
168
169 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
169 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
170 if name != self._target:
170 if name != self._target:
171 return self._opener(name, mode, **kwargs)
171 return self._opener(name, mode, **kwargs)
172 return self._opener(name + b".a", mode, **kwargs)
172 return self._opener(name + b".a", mode, **kwargs)
173
173
174 def __getattr__(self, attr):
174 def __getattr__(self, attr):
175 return getattr(self._opener, attr)
175 return getattr(self._opener, attr)
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
182 if name != target:
182 if name != target:
183 return opener(name, mode, **kwargs)
183 return opener(name, mode, **kwargs)
184 assert not kwargs
184 assert not kwargs
185 return appender(opener, name, mode, buf)
185 return appender(opener, name, mode, buf)
186
186
187 return _delay
187 return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    """Field-for-field stand-in for a parsed changelog revision.

    ``changelogrevision.__new__`` returns an instance of this lightweight
    attrs class when there is no revision text to parse; it exposes the
    same attribute names with neutral default values.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node (binary); nullid when absent
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    # None means "unrecorded", as opposed to an empty list
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.

    ``sidedata`` is the raw sidedata mapping for the revision and
    ``cpsd`` ("copies stored in sidedata") selects whether file/copy
    information is read from sidedata or from legacy ``extra`` fields.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
        '_changes',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # no revision text: hand back the attrs stand-in with defaults
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        # record offsets instead of slicing eagerly: all field accessors
        # below are lazy properties over self._text
        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd
        # decoded ChangingFiles object, built lazily by ``changes``
        self._changes = None

        return self

    @property
    def manifest(self):
        """manifest node as binary"""
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        """committer, in the local encoding"""
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the date line: time, timezone
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # third field of the date line, or None when absent (changelog v0)
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        """(time, timezone) tuple"""
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        """decoded extra-metadata dict (``_defaultextra`` when absent)"""
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def changes(self):
        """the ChangingFiles object describing this revision's file changes

        Decoded from sidedata when copies are stored there, otherwise
        reconstructed from the legacy in-extra encoding.  Cached after the
        first access.
        """
        if self._changes is not None:
            return self._changes
        if self._cpsd:
            changes = metadata.decode_files_sidedata(self, self._sidedata)
        else:
            changes = metadata.ChangingFiles(
                touched=self.files or (),
                added=self.filesadded or (),
                removed=self.filesremoved or (),
                p1_copies=self.p1copies or {},
                p2_copies=self.p2copies or {},
            )
        self._changes = changes
        return changes

    @property
    def files(self):
        """list of files touched by the revision (may be empty)"""
        off = self._offsets
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            # sidedata is decoded once into a ChangingFiles object and
            # simply read from there
            return self.changes.added
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            # read from the decoded ChangingFiles object, consistent with
            # ``filesadded`` (was previously decoded ad hoc from sidedata)
            return self.changes.removed
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            # read from the decoded ChangingFiles object, consistent with
            # ``filesadded``
            return self.changes.copied_from_p1
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            # read from the decoded ChangingFiles object, consistent with
            # ``filesadded``
            return self.changes.copied_from_p2
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        """commit message, in the local encoding"""
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
381
379
382
380
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state used by delayupdate()/_writepending()/_finalize() to keep
        # index updates invisible to other readers until the transaction
        # is finalized
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        # the hash cache is keyed on the filtered set; invalidate it
        self._filteredrevs_hashcache = {}

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert all writes to a ".a" sibling file
                # that _finalize() will rename into place
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer appended data in memory
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            # tell the transaction a pending file exists
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while updates are delayed the index must not be split into a
        # separate data file; _finalize() re-runs this once done
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        # files are the lines between the third newline and the blank
        # line that precedes the description (see format in
        # changelogrevision.__new__)
        last = text.index(b"\n\n")
        l = text[:last].split(b'\n')
        return l[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        # ``files`` is a metadata.ChangingFiles object describing the
        # file changes of the revision; returns the new changeset node.

        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is implicit; don't store it
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        # assemble the entry per the format documented in
        # changelogrevision.__new__
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        return self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata
        )

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        extra = self.read(rev)[5]
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

    def _nodeduplicatecallback(self, transaction, node):
        # keep track of revisions that got "re-added", eg: unbundle of
        # a known rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now