changelog: stop useless enforcing split at the end of transaction...
marmoute
r52209:178e50ed default
@@ -1,506 +1,507 @@
# changelog.py - changelog class for mercurial
#
# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.


from .i18n import _
from .node import (
    bin,
    hex,
)
from .thirdparty import attr

from . import (
    encoding,
    error,
    metadata,
    pycompat,
    revlog,
)
from .utils import (
    dateutil,
    stringutil,
)
from .revlogutils import (
    constants as revlog_constants,
    flagutil,
)

_defaultextra = {b'branch': b'default'}


def _string_escape(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
    >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
    >>> s
    'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
    >>> res = _string_escape(s)
    >>> s == _string_unescape(res)
    True
    """
    # subset of the string_escape codec
    text = (
        text.replace(b'\\', b'\\\\')
        .replace(b'\n', b'\\n')
        .replace(b'\r', b'\\r')
    )
    return text.replace(b'\0', b'\\0')


def _string_unescape(text):
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    return stringutil.unescapestr(text)


def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for l in text.split(b'\0'):
        if l:
            k, v = _string_unescape(l).split(b':', 1)
            extra[k] = v
    return extra


def encodeextra(d):
    # keys must be sorted to produce a deterministic changelog entry
    items = [_string_escape(b'%s:%s' % (k, d[k])) for k in sorted(d)]
    return b"\0".join(items)

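A quick round trip through the two helpers above, with invented keys and values (a minimal sketch; it only assumes that ``mercurial`` is importable):

from mercurial.changelog import decodeextra, encodeextra

# Invented keys/values; any bytes key/value pair round-trips the same way.
extra = {b'branch': b'stable', b'source': b'abc\ndef'}
blob = encodeextra(extra)
# 'key:value' pairs, escaped and joined with NUL bytes, sorted by key:
assert blob == b'branch:stable\x00source:abc\\ndef'
assert decodeextra(blob) == extra
# An empty blob falls back to the module default:
assert decodeextra(b'') == {b'branch': b'default'}
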
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')


@attr.s
class _changelogrevision:
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib()
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
    branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))


class changelogrevision:
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        '_offsets',
        '_text',
        '_sidedata',
        '_cpsd',
        '_changes',
    )

    def __new__(cls, cl, text, sidedata, cpsd):
        if not text:
            return _changelogrevision(extra=_defaultextra, manifest=cl.nullid)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty, in which case nl3 is the first
        # newline of the double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd
        self._changes = None

        return self

    @property
    def manifest(self):
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def changes(self):
        if self._changes is not None:
            return self._changes
        if self._cpsd:
            changes = metadata.decode_files_sidedata(self._sidedata)
        else:
            changes = metadata.ChangingFiles(
                touched=self.files or (),
                added=self.filesadded or (),
                removed=self.filesremoved or (),
                p1_copies=self.p1copies or {},
                p2_copies=self.p2copies or {},
            )
        self._changes = changes
        return changes

    @property
    def files(self):
        if self._cpsd:
            return sorted(self.changes.touched)
        off = self._offsets
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            return self.changes.added
        else:
            rawindices = self.extra.get(b'filesadded')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            return self.changes.removed
        else:
            rawindices = self.extra.get(b'filesremoved')
            if rawindices is None:
                return None
            return metadata.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            return self.changes.copied_from_p1
        else:
            rawcopies = self.extra.get(b'p1copies')
            if rawcopies is None:
                return None
            return metadata.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            return self.changes.copied_from_p2
        else:
            rawcopies = self.extra.get(b'p2copies')
            if rawcopies is None:
                return None
            return metadata.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])

    @property
    def branchinfo(self):
        extra = self.extra
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

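Putting the pieces together: the "format used" comment in ``changelogrevision.__new__`` describes the raw entry text, and the properties above slice it lazily. A minimal sketch with a hand-built entry (the hash, user, date, and file names are all invented):

from mercurial.changelog import changelogrevision

text = (
    b'5b285ad100a3a2c6e2c55ba9ba228ea1a2a9e242\n'  # manifest node, hex
    b'Alice <alice@example.org>\n'                 # user
    b'1700000000 0 branch:stable\n'                # time, timezone, extra
    b'a.txt\n'                                     # touched files, one per line
    b'b/c.txt\n'
    b'\n'                                          # blank line before description
    b'commit message body\n'
)
# `cl` is only consulted when text is empty, so None is fine for this sketch;
# sidedata/cpsd are unused when copies are not stored in sidedata.
c = changelogrevision(None, text, {}, False)
assert c.user == b'Alice <alice@example.org>'
assert c.files == [b'a.txt', b'b/c.txt']
assert c.extra[b'branch'] == b'stable'
assert c.branchinfo == (b'stable', False)
assert c.description == b'commit message body\n'
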
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False, concurrencychecker=None):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hook processes) accessing data before a transaction is finalized.

        ``concurrencychecker`` will be passed to the revlog init function, see
        the documentation there.
        """
        revlog.revlog.__init__(
            self,
            opener,
            target=(revlog_constants.KIND_CHANGELOG, None),
            radix=b'00changelog',
            checkambig=True,
            mmaplargeindex=True,
            persistentnodemap=opener.options.get(b'persistent-nodemap', False),
            concurrencychecker=concurrencychecker,
            trypending=trypending,
            may_inline=False,
        )

        if self._initempty and (self._format_version == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self._format_flags &= ~revlog.FLAG_GENERALDELTA
            self.delta_config.general_delta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each other. So disable
        # delta chains.
        self._storedeltachains = False

        self._v2_delayed = False
        self._filteredrevs = frozenset()
        self._filteredrevs_hashcache = {}
        self._copiesstorage = opener.options.get(b'copies-storage')

    @property
    def filteredrevs(self):
        return self._filteredrevs

    @filteredrevs.setter
    def filteredrevs(self, val):
        # Ensure all updates go through this function
        assert isinstance(val, frozenset)
        self._filteredrevs = val
        self._filteredrevs_hashcache = {}

    def _write_docket(self, tr):
        if not self._v2_delayed:
            super(changelog, self)._write_docket(tr)

    def delayupdate(self, tr):
        """delay visibility of index updates to other readers"""
        assert not self._inner.is_open
        assert not self._may_inline
        # enforce that older changelogs that are still inline are split at
        # the first opportunity.
        if self._inline:
            self._enforceinlinesize(tr)
        if self._docket is not None:
            self._v2_delayed = True
        else:
            new_index = self._inner.delay()
            if new_index is not None:
                self._indexfile = new_index
                tr.registertmp(new_index)
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

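    # Rough sketch of how the delayed-write pieces fit together: delayupdate()
    # above keeps new index data out of the main index so other readers do not
    # see it, and (for legacy inline changelogs) performs the split right
    # away -- which is why _finalize() below can raise a ProgrammingError if
    # the changelog is still inline instead of splitting it there.  When a
    # pretxn* hook needs the pending state, the transaction runs the
    # registered b'cl-...' pending callback, i.e. _writepending(tr), which
    # exposes a pending view (the ``00changelog.i.a`` style file described in
    # __init__).  At tr.close(), the finalize callback runs _finalize(tr),
    # publishing the final index and clearing the delayed state.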
    def _finalize(self, tr):
        """finalize index updates"""
        assert not self._inner.is_open
        if self._docket is not None:
            self._docket.write(tr)
            self._v2_delayed = False
        else:
            new_index_file = self._inner.finalize_pending()
            self._indexfile = new_index_file
-            # split when we're done
-            self._enforceinlinesize(tr, side_write=False)
+            if self._inline:
+                msg = 'changelog should not be inline at that point'
+                raise error.ProgrammingError(msg)

    def _writepending(self, tr):
        """create a file containing the unfinalized state for
        pretxnchangegroup"""
        assert not self._inner.is_open
        if self._docket:
            any_pending = self._docket.write(tr, pending=True)
            self._v2_delayed = False
        else:
            new_index, any_pending = self._inner.write_pending()
            if new_index is not None:
                self._indexfile = new_index
                tr.registertmp(new_index)
        return any_pending

    def _enforceinlinesize(self, tr, side_write=True):
        if not self.is_delaying:
            revlog.revlog._enforceinlinesize(self, tr, side_write=side_write)

    def read(self, nodeorrev):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d = self._revisiondata(nodeorrev)
        sidedata = self.sidedata(nodeorrev)
        copy_sd = self._copiesstorage == b'changeset-sidedata'
        c = changelogrevision(self, d, sidedata, copy_sd)
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text = self._revisiondata(nodeorrev)
        sidedata = self.sidedata(nodeorrev)
        return changelogrevision(
            self, text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, nodeorrev):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(nodeorrev)
        if not text:
            return []
        last = text.index(b"\n\n")
        l = text[:last].split(b'\n')
        return l[3:]

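    # readfiles() above relies on the entry layout documented in
    # changelogrevision.__new__: index 0 of the split is the manifest hash,
    # index 1 the user and index 2 the date/extra line, so the touched files
    # are everything from index 3 up to the blank line.  For a hypothetical
    # entry b'deadbeef\nAlice\n0 0\nfile-a\nfile-b\n\nmsg' it returns
    # [b'file-a', b'file-b'].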
    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
    ):
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files.touched)
        flags = 0
        sidedata = None
        if self._copiesstorage == b'changeset-sidedata':
            if files.has_copies_info:
                flags |= flagutil.REVIDX_HASCOPIESINFO
            sidedata = metadata.encode_files_sidedata(files)

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        rev = self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags
        )
        return self.node(rev)

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        return self.changelogrevision(rev).branchinfo

    def _nodeduplicatecallback(self, transaction, rev):
        # keep track of revisions that got "re-added", e.g. unbundle of a
        # known rev.
        #
        # We track them in a list to preserve their order from the source
        # bundle.
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(rev)
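
For consumers, the difference between ``read`` and ``changelogrevision`` above is mostly cost: ``read`` materializes every field, while ``changelogrevision`` parses lazily. A usage sketch, assuming an existing ``repo`` (a localrepository object) with at least one revision:

# `repo` is assumed to exist already; revision 0 is arbitrary.
cl = repo.changelog
manifest, user, date, files, desc, extra = cl.read(0)
entry = cl.changelogrevision(0)  # cheaper when only some fields are needed
assert user == entry.user
assert files == entry.files
assert desc == entry.description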