##// END OF EJS Templates
changelog: change the implementation of `_divertopener`...
marmoute -
r44985:897f0ce4 default
parent child Browse files
Show More
@@ -1,629 +1,632 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
# Fallback 'extra' metadata for a changeset: decodeextra() starts from a
# copy of this, so entries that never stored a branch read as 'default'.
_defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse ``_string_escape``: restore NUL bytes, then let
    ``stringutil.unescapestr`` handle the remaining escape sequences."""
    if b'\\0' in text:
        # Restore NUL bytes without misreading b'\\0' (an escaped
        # backslash followed by '0').  A literal newline cannot occur in
        # escaped text, so it serves as a temporary marker protecting
        # every doubled backslash.
        marked = text.replace(b'\\\\', b'\\\\\n')
        marked = marked.replace(b'\\0', b'\0')
        text = marked.replace(b'\n', b'')
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """Parse an encoded extra blob back into a dict (on top of the defaults).

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ... ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ... b'baz': chr(92) + chr(0) + b'2'})
    ... ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for field in text.split(b'\0'):
        if not field:
            continue
        key, value = _string_unescape(field).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Serialize an extra dict into the NUL-separated changelog format.

    Keys are emitted in sorted order so the resulting entry (and thus the
    changeset hash) is deterministic.
    """
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d)
    )
90
90
91
91
def stripdesc(desc):
    """Strip trailing whitespace from every line, then drop leading and
    trailing empty lines from the description."""
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
95
95
96
96
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # `buf` is a caller-owned list that accumulates the chunks written
        # through this object; the real file is never touched by write().
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # Virtual end of file: on-disk size plus everything buffered.
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # Nothing to flush: appended data lives only in the buffer.
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # Only positions inside the on-disk portion map to the real file.
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        result = b""
        if self.offset < self.size:
            # Satisfy as much as possible from the on-disk portion first.
            chunk = self.fp.read(count)
            result = chunk
            self.offset += len(chunk)
            if count > 0:
                count -= len(chunk)
        if count != 0:
            # The remainder comes from the in-memory buffer.  Coalesce the
            # buffered chunks into one bytes object — in place, because the
            # list object is shared with the caller — before slicing.
            bufstart = self.offset - self.size
            self.data[:] = [b"".join(self.data)]
            chunk = self.data[0][bufstart : bufstart + count]
            self.offset += len(chunk)
            result += chunk
        return result

    def write(self, s):
        chunk = bytes(s)
        self.data.append(chunk)
        self.offset += len(chunk)
        self._end += len(chunk)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
165
165
166
166
167 def _divertopener(opener, target):
167 class _divertopener(object):
168 """build an opener that writes in 'target.a' instead of 'target'"""
168 def __init__(self, opener, target):
169 self._opener = opener
170 self._target = target
169
171
170 def _divert(name, mode=b'r', checkambig=False, **kwargs):
172 def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
171 if name != target:
173 if name != self._target:
172 return opener(name, mode, **kwargs)
174 return self._opener(name, mode, **kwargs)
173 return opener(name + b".a", mode, **kwargs)
175 return self._opener(name + b".a", mode, **kwargs)
174
176
175 return _divert
177 def __getattr__(self, attr):
178 return getattr(self._opener, attr)
176
179
177
180
178 def _delayopener(opener, target, buf):
181 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
182 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
183
181 def _delay(name, mode=b'r', checkambig=False, **kwargs):
184 def _delay(name, mode=b'r', checkambig=False, **kwargs):
182 if name != target:
185 if name != target:
183 return opener(name, mode, **kwargs)
186 return opener(name, mode, **kwargs)
184 assert not kwargs
187 assert not kwargs
185 return appender(opener, name, mode, buf)
188 return appender(opener, name, mode, buf)
186
189
187 return _delay
190 return _delay
188
191
189
192
@attr.s
class _changelogrevision(object):
    # Lightweight value object holding the parsed fields of a changelog
    # entry.  changelogrevision.__new__ returns one of these (all defaults,
    # plus the passed-in extra) when asked to parse empty text, i.e. for
    # the null revision.
    #
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
207
205
208
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        # Empty text means the null revision: fall back to a value object
        # carrying only the default extra.
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # Only the newline offsets are computed eagerly; the individual
        # fields are sliced lazily by the properties below.
        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # cpsd: whether copy metadata is stored in changeset sidedata
        # rather than in the 'extra' dict.
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # First line: hex manifest node.
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        # Second line: author, converted to the local encoding.
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # Third line is "time tz [extra]"; return [time, tz] as bytes.
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # Optional third field of the date line; None when absent
        # (changelog v0 did not store extra).
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        # Lines between the date line and the blank separator line.
        off = self._offsets
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            # copy metadata lives in changeset sidedata
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            # legacy storage in the 'extra' dict
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # Everything after the double newline separator.
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
365
363
366
364 class changelog(revlog.revlog):
367 class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(
                b'exp-persistent-nodemap', False
            ),
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # State used by delayupdate()/_writepending()/_finalize() to keep
        # index writes invisible to other readers until finalization.
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self.filteredrevs = frozenset()
        # Where copy metadata is stored ('extra', 'changeset-sidedata', ...);
        # read from the opener's options as configured by the repository.
        self._copiesstorage = opener.options.get(b'copies-storage')
410
413
    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""

        if not self._delayed:
            if len(self) == 0:
                # Empty changelog: divert all writes to a temporary
                # '<indexfile>.a' file which _finalize() renames into place.
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    # stale pending file left behind by an earlier
                    # transaction — remove it before diverting
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # Existing data: buffer new index chunks in memory instead,
                # to be appended (or written pending) later.
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        # hook the delayed state into the transaction lifecycle
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
428
431
    def _finalize(self, tr):
        """finalize index updates"""
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            # NOTE(review): the open/close pair appears to verify the
            # pending file exists before renaming — confirm intent.
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the in-memory buffered chunks to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
        self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
448
451
    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup

        Returns True when a pending '.a' file is in use (divert mode),
        False otherwise.
        """
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False
473
476
474 def _enforceinlinesize(self, tr, fp=None):
477 def _enforceinlinesize(self, tr, fp=None):
475 if not self._delayed:
478 if not self._delayed:
476 revlog.revlog._enforceinlinesize(self, tr, fp)
479 revlog.revlog._enforceinlinesize(self, tr, fp)
477
480
478 def read(self, node):
481 def read(self, node):
479 """Obtain data from a parsed changelog revision.
482 """Obtain data from a parsed changelog revision.
480
483
481 Returns a 6-tuple of:
484 Returns a 6-tuple of:
482
485
483 - manifest node in binary
486 - manifest node in binary
484 - author/user as a localstr
487 - author/user as a localstr
485 - date as a 2-tuple of (time, timezone)
488 - date as a 2-tuple of (time, timezone)
486 - list of files
489 - list of files
487 - commit message as a localstr
490 - commit message as a localstr
488 - dict of extra metadata
491 - dict of extra metadata
489
492
490 Unless you need to access all fields, consider calling
493 Unless you need to access all fields, consider calling
491 ``changelogrevision`` instead, as it is faster for partial object
494 ``changelogrevision`` instead, as it is faster for partial object
492 access.
495 access.
493 """
496 """
494 d, s = self._revisiondata(node)
497 d, s = self._revisiondata(node)
495 c = changelogrevision(
498 c = changelogrevision(
496 d, s, self._copiesstorage == b'changeset-sidedata'
499 d, s, self._copiesstorage == b'changeset-sidedata'
497 )
500 )
498 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
501 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
499
502
500 def changelogrevision(self, nodeorrev):
503 def changelogrevision(self, nodeorrev):
501 """Obtain a ``changelogrevision`` for a node or revision."""
504 """Obtain a ``changelogrevision`` for a node or revision."""
502 text, sidedata = self._revisiondata(nodeorrev)
505 text, sidedata = self._revisiondata(nodeorrev)
503 return changelogrevision(
506 return changelogrevision(
504 text, sidedata, self._copiesstorage == b'changeset-sidedata'
507 text, sidedata, self._copiesstorage == b'changeset-sidedata'
505 )
508 )
506
509
507 def readfiles(self, node):
510 def readfiles(self, node):
508 """
511 """
509 short version of read that only returns the files modified by the cset
512 short version of read that only returns the files modified by the cset
510 """
513 """
511 text = self.revision(node)
514 text = self.revision(node)
512 if not text:
515 if not text:
513 return []
516 return []
514 last = text.index(b"\n\n")
517 last = text.index(b"\n\n")
515 l = text[:last].split(b'\n')
518 l = text[:last].split(b'\n')
516 return l[3:]
519 return l[3:]
517
520
518 def add(
521 def add(
519 self,
522 self,
520 manifest,
523 manifest,
521 files,
524 files,
522 desc,
525 desc,
523 transaction,
526 transaction,
524 p1,
527 p1,
525 p2,
528 p2,
526 user,
529 user,
527 date=None,
530 date=None,
528 extra=None,
531 extra=None,
529 p1copies=None,
532 p1copies=None,
530 p2copies=None,
533 p2copies=None,
531 filesadded=None,
534 filesadded=None,
532 filesremoved=None,
535 filesremoved=None,
533 ):
536 ):
534 # Convert to UTF-8 encoded bytestrings as the very first
537 # Convert to UTF-8 encoded bytestrings as the very first
535 # thing: calling any method on a localstr object will turn it
538 # thing: calling any method on a localstr object will turn it
536 # into a str object and the cached UTF-8 string is thus lost.
539 # into a str object and the cached UTF-8 string is thus lost.
537 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
540 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
538
541
539 user = user.strip()
542 user = user.strip()
540 # An empty username or a username with a "\n" will make the
543 # An empty username or a username with a "\n" will make the
541 # revision text contain two "\n\n" sequences -> corrupt
544 # revision text contain two "\n\n" sequences -> corrupt
542 # repository since read cannot unpack the revision.
545 # repository since read cannot unpack the revision.
543 if not user:
546 if not user:
544 raise error.StorageError(_(b"empty username"))
547 raise error.StorageError(_(b"empty username"))
545 if b"\n" in user:
548 if b"\n" in user:
546 raise error.StorageError(
549 raise error.StorageError(
547 _(b"username %r contains a newline") % pycompat.bytestr(user)
550 _(b"username %r contains a newline") % pycompat.bytestr(user)
548 )
551 )
549
552
550 desc = stripdesc(desc)
553 desc = stripdesc(desc)
551
554
552 if date:
555 if date:
553 parseddate = b"%d %d" % dateutil.parsedate(date)
556 parseddate = b"%d %d" % dateutil.parsedate(date)
554 else:
557 else:
555 parseddate = b"%d %d" % dateutil.makedate()
558 parseddate = b"%d %d" % dateutil.makedate()
556 if extra:
559 if extra:
557 branch = extra.get(b"branch")
560 branch = extra.get(b"branch")
558 if branch in (b"default", b""):
561 if branch in (b"default", b""):
559 del extra[b"branch"]
562 del extra[b"branch"]
560 elif branch in (b".", b"null", b"tip"):
563 elif branch in (b".", b"null", b"tip"):
561 raise error.StorageError(
564 raise error.StorageError(
562 _(b'the name \'%s\' is reserved') % branch
565 _(b'the name \'%s\' is reserved') % branch
563 )
566 )
564 sortedfiles = sorted(files)
567 sortedfiles = sorted(files)
565 sidedata = None
568 sidedata = None
566 if extra is not None:
569 if extra is not None:
567 for name in (
570 for name in (
568 b'p1copies',
571 b'p1copies',
569 b'p2copies',
572 b'p2copies',
570 b'filesadded',
573 b'filesadded',
571 b'filesremoved',
574 b'filesremoved',
572 ):
575 ):
573 extra.pop(name, None)
576 extra.pop(name, None)
574 if p1copies is not None:
577 if p1copies is not None:
575 p1copies = copies.encodecopies(sortedfiles, p1copies)
578 p1copies = copies.encodecopies(sortedfiles, p1copies)
576 if p2copies is not None:
579 if p2copies is not None:
577 p2copies = copies.encodecopies(sortedfiles, p2copies)
580 p2copies = copies.encodecopies(sortedfiles, p2copies)
578 if filesadded is not None:
581 if filesadded is not None:
579 filesadded = copies.encodefileindices(sortedfiles, filesadded)
582 filesadded = copies.encodefileindices(sortedfiles, filesadded)
580 if filesremoved is not None:
583 if filesremoved is not None:
581 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
584 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
582 if self._copiesstorage == b'extra':
585 if self._copiesstorage == b'extra':
583 extrasentries = p1copies, p2copies, filesadded, filesremoved
586 extrasentries = p1copies, p2copies, filesadded, filesremoved
584 if extra is None and any(x is not None for x in extrasentries):
587 if extra is None and any(x is not None for x in extrasentries):
585 extra = {}
588 extra = {}
586 if p1copies is not None:
589 if p1copies is not None:
587 extra[b'p1copies'] = p1copies
590 extra[b'p1copies'] = p1copies
588 if p2copies is not None:
591 if p2copies is not None:
589 extra[b'p2copies'] = p2copies
592 extra[b'p2copies'] = p2copies
590 if filesadded is not None:
593 if filesadded is not None:
591 extra[b'filesadded'] = filesadded
594 extra[b'filesadded'] = filesadded
592 if filesremoved is not None:
595 if filesremoved is not None:
593 extra[b'filesremoved'] = filesremoved
596 extra[b'filesremoved'] = filesremoved
594 elif self._copiesstorage == b'changeset-sidedata':
597 elif self._copiesstorage == b'changeset-sidedata':
595 sidedata = {}
598 sidedata = {}
596 if p1copies:
599 if p1copies:
597 sidedata[sidedatamod.SD_P1COPIES] = p1copies
600 sidedata[sidedatamod.SD_P1COPIES] = p1copies
598 if p2copies:
601 if p2copies:
599 sidedata[sidedatamod.SD_P2COPIES] = p2copies
602 sidedata[sidedatamod.SD_P2COPIES] = p2copies
600 if filesadded:
603 if filesadded:
601 sidedata[sidedatamod.SD_FILESADDED] = filesadded
604 sidedata[sidedatamod.SD_FILESADDED] = filesadded
602 if filesremoved:
605 if filesremoved:
603 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
606 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
604 if not sidedata:
607 if not sidedata:
605 sidedata = None
608 sidedata = None
606
609
607 if extra:
610 if extra:
608 extra = encodeextra(extra)
611 extra = encodeextra(extra)
609 parseddate = b"%s %s" % (parseddate, extra)
612 parseddate = b"%s %s" % (parseddate, extra)
610 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
613 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
611 text = b"\n".join(l)
614 text = b"\n".join(l)
612 return self.addrevision(
615 return self.addrevision(
613 text, transaction, len(self), p1, p2, sidedata=sidedata
616 text, transaction, len(self), p1, p2, sidedata=sidedata
614 )
617 )
615
618
616 def branchinfo(self, rev):
619 def branchinfo(self, rev):
617 """return the branch name and open/close state of a revision
620 """return the branch name and open/close state of a revision
618
621
619 This function exists because creating a changectx object
622 This function exists because creating a changectx object
620 just to access this is costly."""
623 just to access this is costly."""
621 extra = self.read(rev)[5]
624 extra = self.read(rev)[5]
622 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
625 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
623
626
624 def _nodeduplicatecallback(self, transaction, node):
627 def _nodeduplicatecallback(self, transaction, node):
625 # keep track of revisions that got "re-added", eg: unbunde of know rev.
628 # keep track of revisions that got "re-added", eg: unbunde of know rev.
626 #
629 #
627 # We track them in a list to preserve their order from the source bundle
630 # We track them in a list to preserve their order from the source bundle
628 duplicates = transaction.changes.setdefault(b'revduplicates', [])
631 duplicates = transaction.changes.setdefault(b'revduplicates', [])
629 duplicates.append(self.rev(node))
632 duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now