##// END OF EJS Templates
changelog: add a `changes` property on `changelogrevision`...
marmoute -
r46144:9a3563b4 default
parent child Browse files
Show More
@@ -1,597 +1,613 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 metadata,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
32 _defaultextra = {b'branch': b'default'}
32 _defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse _string_escape; remaining escapes go to unescapestr."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag literal
        # backslash pairs with a newline, substitute, then drop the tags
        marked = text.replace(b'\\\\', b'\\\\\n')
        substituted = marked.replace(b'\\0', b'\0')
        text = substituted.replace(b'\n', b'')
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ... ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ... b'baz': chr(92) + chr(0) + b'2'})
    ... ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Encode an extra dict as \\0-separated, escaped ``key:value`` pairs."""
    # keys must be sorted to produce a deterministic changelog entry
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, d[key])) for key in sorted(d)
    )
87
87
88
88
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    stripped_lines = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(stripped_lines).strip(b'\n')
92
92
93
93
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # delayed writes accumulate in 'buf' (a caller-owned list of byte
        # chunks); the underlying file is never written by this class
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # current virtual offset; may point past the real file into the
        # buffered data
        self.offset = fp.tell()
        # size of the real on-disk file: the boundary between fp and data
        self.size = vfs.fstat(fp).st_size
        # virtual end of file (on-disk size plus everything buffered)
        self._end = self.size

    def end(self):
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are deliberately retained in memory; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only reposition the real file when the virtual offset falls
            # inside it; reads within the buffer are served from self.data
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # serve the first part of the read from the real file
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # continue from the in-memory buffer; collapse the chunk list
            # into a single entry first so it can be sliced directly
            # NOTE(review): with count == -1 this slices data[0][doff:doff-1],
            # dropping the final buffered byte -- callers appear to always
            # pass an explicit count; confirm before relying on read(-1)
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # appended data only grows the virtual end, never the real file
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
162
162
163
163
164 class _divertopener(object):
164 class _divertopener(object):
165 def __init__(self, opener, target):
165 def __init__(self, opener, target):
166 self._opener = opener
166 self._opener = opener
167 self._target = target
167 self._target = target
168
168
169 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
169 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
170 if name != self._target:
170 if name != self._target:
171 return self._opener(name, mode, **kwargs)
171 return self._opener(name, mode, **kwargs)
172 return self._opener(name + b".a", mode, **kwargs)
172 return self._opener(name + b".a", mode, **kwargs)
173
173
174 def __getattr__(self, attr):
174 def __getattr__(self, attr):
175 return getattr(self._opener, attr)
175 return getattr(self._opener, attr)
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
182 if name != target:
182 if name != target:
183 return opener(name, mode, **kwargs)
183 return opener(name, mode, **kwargs)
184 assert not kwargs
184 assert not kwargs
185 return appender(opener, name, mode, buf)
185 return appender(opener, name, mode, buf)
186
186
187 return _delay
187 return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    """Static stand-in for ``changelogrevision``.

    ``changelogrevision.__new__`` returns an instance of this class for an
    empty (null) revision, so attribute access works without parsing text.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
    # mirror the `changes` property of changelogrevision so callers using it
    # do not AttributeError on the null revision; appended last to keep the
    # generated __init__'s positional argument order backward compatible
    changes = attr.ib(default=None)
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
        '_changes',
    )

    def __new__(cls, text, sidedata, cpsd):
        # an empty text means the null revision: hand back the static
        # stand-in instead of parsing anything
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        # the four newline offsets are all the parse state the properties
        # below need; everything else is sliced lazily out of _text
        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # whether copy/file metadata lives in sidedata (vs. extra)
        self._cpsd = cpsd
        # lazily built, cached ChangingFiles object (see `changes`)
        self._changes = None

        return self

    @property
    def manifest(self):
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the date line: time, timezone
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # third field of the date line, if present (encoded extra dict)
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def changes(self):
        # aggregate the per-kind file information into a single
        # metadata.ChangingFiles object, computed once and cached
        if self._changes is not None:
            return self._changes
        changes = metadata.ChangingFiles(
            touched=self.files or (),
            added=self.filesadded or (),
            removed=self.filesremoved or (),
            p1_copies=self.p1copies or {},
            p2_copies=self.p2copies or {},
        )
        self._changes = changes
        return changes

    @property
    def files(self):
        off = self._offsets
        # off[2] == off[3] means the files block between the date line and
        # the double newline is empty
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        # prefer sidedata when copies are stored there; fall back to extra
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        # None means "not recorded", distinct from an empty list
        if rawindices is None:
            return None
        return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # +2 skips the double newline that terminates the files block
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
378
363
379
class changelog(revlog.revlog):
    # the changelog revlog, stored in 00changelog.i / 00changelog.d; index
    # writes can be delayed or diverted so other readers (e.g. hooks) only
    # see transaction data once it is finalized

    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        self._realopener = opener
        # state for the delay/divert machinery (see delayupdate below)
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        self._filteredrevs_hashcache = {}

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert writes to 00changelog.i.a, which
                # _finalize can simply rename into place
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer index writes in memory until
                # the transaction is finalized
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # splitting the inline revlog must wait until writes are no longer
        # delayed; _finalize re-invokes this once that is the case
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

        - manifest node in binary
        - author/user as a localstr
        - date as a 2-tuple of (time, timezone)
        - list of files
        - commit message as a localstr
        - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        last = text.index(b"\n\n")
        # skip the manifest, user and date lines; the rest up to the double
        # newline is the file list (see the format description in
        # changelogrevision.__new__ / the layout built by add())
        l = text[:last].split(b'\n')
        return l[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        # 'files' is an object exposing at least a 'touched' attribute and
        # accepted by metadata.encode_files_sidedata (a ChangingFiles-like
        # object), not a plain list

        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is implicit; storing it would be
                # redundant
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        # assemble the revision text in the canonical changelog layout:
        # manifest, user, date(+extra), files, empty line, description
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        return self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata
        )

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        # read() returns a 6-tuple whose last element is the extra dict
        extra = self.read(rev)[5]
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

    def _nodeduplicatecallback(self, transaction, node):
        # keep track of revisions that got "re-added", eg: unbundle of
        # known rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now