##// END OF EJS Templates
changelog: rename parameters to reflect semantics...
Joerg Sonnenberger -
r47376:230f7301 default
parent child Browse files
Show More
@@ -1,622 +1,622
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 metadata,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29 from .revlogutils import flagutil
29 from .revlogutils import flagutil
30
30
31 _defaultextra = {b'branch': b'default'}
31 _defaultextra = {b'branch': b'default'}
32
32
33
33
34 def _string_escape(text):
34 def _string_escape(text):
35 """
35 """
36 >>> from .pycompat import bytechr as chr
36 >>> from .pycompat import bytechr as chr
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s
39 >>> s
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 >>> res = _string_escape(s)
41 >>> res = _string_escape(s)
42 >>> s == _string_unescape(res)
42 >>> s == _string_unescape(res)
43 True
43 True
44 """
44 """
45 # subset of the string_escape codec
45 # subset of the string_escape codec
46 text = (
46 text = (
47 text.replace(b'\\', b'\\\\')
47 text.replace(b'\\', b'\\\\')
48 .replace(b'\n', b'\\n')
48 .replace(b'\n', b'\\n')
49 .replace(b'\r', b'\\r')
49 .replace(b'\r', b'\\r')
50 )
50 )
51 return text.replace(b'\0', b'\\0')
51 return text.replace(b'\0', b'\\0')
52
52
53
53
def _string_unescape(text):
    """Reverse ``_string_escape`` and return the original byte string."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    return stringutil.unescapestr(text)
61
61
62
62
def decodeextra(text):
    """Decode the '\\0'-separated, escaped extra-metadata blob into a dict.

    Missing keys fall back to ``_defaultextra`` (branch ``default``).

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for l in text.split(b'\0'):
        if l:
            # each entry is an escaped 'key:value' pair
            k, v = _string_unescape(l).split(b':', 1)
            extra[k] = v
    return extra
80
80
81
81
def encodeextra(d):
    """Encode an extra dict into the '\\0'-separated escaped wire form.

    Keys are sorted so the resulting changelog entry is deterministic.
    """
    encoded = (_string_escape(b'%s:%s' % (key, d[key])) for key in sorted(d))
    return b"\0".join(encoded)
86
86
87
87
def stripdesc(desc):
    """Normalize a commit description.

    Trailing whitespace is removed from every line, and empty lines at
    the beginning and end of the text are dropped.
    """
    cleaned = []
    for line in desc.splitlines():
        cleaned.append(line.rstrip())
    return b'\n'.join(cleaned).strip(b'\n')
91
91
92
92
class appender(object):
    """the changelog index must be updated last on disk, so we use this class
    to delay writes to it"""

    def __init__(self, vfs, name, mode, buf):
        # 'buf' collects the delayed writes; 'fp' is the real on-disk file.
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        # virtual end of file: on-disk size plus everything buffered so far
        self._end = self.size

    def end(self):
        # virtual size (on-disk data plus buffered writes)
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are held in memory until finalization; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only positions inside the on-disk portion need a real seek
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # first satisfy as much of the read as possible from disk
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # continue in the in-memory buffer; collapse the buffered
            # chunks into a single entry first so slicing is simple
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # writes only ever append to the in-memory buffer
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
161
161
162
162
class _divertopener(object):
    """Opener proxy that redirects access of one target file to 'target.a'.

    Every other file name is passed straight through to the wrapped opener.
    """

    def __init__(self, opener, target):
        self._opener = opener
        self._target = target

    def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
        # only the target file is diverted; everything else passes through
        if name != self._target:
            return self._opener(name, mode, **kwargs)
        return self._opener(name + b".a", mode, **kwargs)

    def __getattr__(self, attr):
        # delegate any other attribute lookup to the wrapped opener
        return getattr(self._opener, attr)
175
175
176
176
def _delayopener(opener, target, buf):
    """build an opener that stores chunks in 'buf' instead of 'target'"""

    def _delay(name, mode=b'r', checkambig=False, **kwargs):
        # non-target files are opened normally
        if name != target:
            return opener(name, mode, **kwargs)
        # appender() takes no extra keyword arguments
        assert not kwargs
        return appender(opener, name, mode, buf)

    return _delay
187
187
188
188
@attr.s
class _changelogrevision(object):
    """Default-valued stand-in returned by changelogrevision() for the
    null/empty revision text."""

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
    branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
        '_changes',
    )

    def __new__(cls, text, sidedata, cpsd):
        # an empty text means the null revision: return an object with
        # all-default values instead of parsing
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        # remember the field boundaries; properties slice lazily from them
        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd
        self._changes = None

        return self

    @property
    def manifest(self):
        # binary manifest node (first line of the text, hex-encoded)
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # (time, timezone) fields of the third line, still as bytes
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # undecoded extra blob, or None when the third line has no third field
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def changes(self):
        # ChangingFiles view of this revision, cached after first access
        if self._changes is not None:
            return self._changes
        if self._cpsd:
            changes = metadata.decode_files_sidedata(self._sidedata)
        else:
            changes = metadata.ChangingFiles(
                touched=self.files or (),
                added=self.filesadded or (),
                removed=self.filesremoved or (),
                p1_copies=self.p1copies or {},
                p2_copies=self.p2copies or {},
            )
        self._changes = changes
        return changes

    @property
    def files(self):
        if self._cpsd:
            return sorted(self.changes.touched)
        off = self._offsets
        # equal offsets mean the files section is empty
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            return self.changes.added
        else:
            rawindices = self.extra.get(b'filesadded')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            return self.changes.removed
        else:
            rawindices = self.extra.get(b'filesremoved')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            return self.changes.copied_from_p1
        else:
            rawcopies = self.extra.get(b'p1copies')
            if rawcopies is None:
                return None
            return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            return self.changes.copied_from_p2
        else:
            rawcopies = self.extra.get(b'p2copies')
            if rawcopies is None:
                return None
            return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # skip the double newline that terminates the files section
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])

    @property
    def branchinfo(self):
        extra = self.extra
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra
380
380
381
381
class changelog(revlog.revlog):
    """Revlog subclass for the changelog (00changelog.i/.d)."""

    def __init__(self, opener, trypending=False, concurrencychecker=None):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.

        ``concurrencychecker`` will be passed to the revlog init function, see
        the documentation there.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
            concurrencychecker=concurrencychecker,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state used by delayupdate()/_writepending()/_finalize()
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        # the hash cache is keyed on the filtered set; invalidate it
        self._filteredrevs_hashcache = {}

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert writes to indexfile + '.a'
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer index writes in memory
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered writes to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while updates are delayed, the inline split must wait for _finalize
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, nodeorrev):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(nodeorrev)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, nodeorrev):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(nodeorrev)
        if not text:
            return []
        last = text.index(b"\n\n")
        # skip the first three lines (manifest, user, date); the rest up to
        # the double newline is the file list
        l = text[:last].split(b'\n')
        return l[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        """Add a new revision to the changelog and return its node."""
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is stored implicitly
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        flags = 0
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            if files.has_copies_info:
                flags |= flagutil.REVIDX_HASCOPIESINFO
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        # assemble the on-disk revision text (see changelogrevision.__new__
        # for the format description)
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        rev = self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags
        )
        return self.node(rev)

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        return self.changelogrevision(rev).branchinfo

    def _nodeduplicatecallback(self, transaction, rev):
        # keep track of revisions that got "re-added", eg: unbundle of known
        # rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(rev)
General Comments 0
You need to be logged in to leave comments. Login now