##// END OF EJS Templates
changelog: fix handling of empty copy entries in changeset...
Martin von Zweigbergk -
r42756:e3df1e15 default
parent child Browse files
Show More
@@ -1,665 +1,669 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import (
16 from .thirdparty import (
17 attr,
17 attr,
18 )
18 )
19
19
20 from . import (
20 from . import (
21 encoding,
21 encoding,
22 error,
22 error,
23 pycompat,
23 pycompat,
24 revlog,
24 revlog,
25 util,
25 util,
26 )
26 )
27 from .utils import (
27 from .utils import (
28 dateutil,
28 dateutil,
29 stringutil,
29 stringutil,
30 )
30 )
31
31
32 _defaultextra = {'branch': 'default'}
32 _defaultextra = {'branch': 'default'}
33
33
34 def _string_escape(text):
34 def _string_escape(text):
35 """
35 """
36 >>> from .pycompat import bytechr as chr
36 >>> from .pycompat import bytechr as chr
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s
39 >>> s
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 >>> res = _string_escape(s)
41 >>> res = _string_escape(s)
42 >>> s == _string_unescape(res)
42 >>> s == _string_unescape(res)
43 True
43 True
44 """
44 """
45 # subset of the string_escape codec
45 # subset of the string_escape codec
46 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
46 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
47 return text.replace('\0', '\\0')
47 return text.replace('\0', '\\0')
48
48
def _string_unescape(text):
    """Reverse _string_escape(); remaining escapes are handled by
    stringutil.unescapestr."""
    if '\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag every
        # escaped backslash with a newline (newlines cannot legitimately
        # occur, they were escaped), expand \0, then drop the tags
        text = (text
                .replace('\\\\', '\\\\\n')
                .replace('\\0', '\0')
                .replace('\n', ''))
    return stringutil.unescapestr(text)
56
56
def decodeextra(text):
    """Decode a raw changeset extra field into a dict.

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for entry in text.split('\0'):
        if not entry:
            continue
        key, value = _string_unescape(entry).split(':', 1)
        extra[key] = value
    return extra
74
74
def encodeextra(d):
    """Encode an extra dict into the NUL-separated on-disk form."""
    # keys must be sorted to produce a deterministic changelog entry
    entries = []
    for key in sorted(d):
        entries.append(_string_escape('%s:%s' % (key, pycompat.bytestr(d[key]))))
    return "\0".join(entries)
82
82
def encodecopies(files, copies):
    """Encode a {dest: source} copy dict as newline-separated
    "<index>\\0<source>" entries, where index points into ``files``."""
    items = ['%d\0%s' % (i, copies[dst])
             for i, dst in enumerate(files)
             if dst in copies]
    if len(items) != len(copies):
        # every copy destination must be present in the file list
        raise error.ProgrammingError('some copy targets missing from file list')
    return "\n".join(items)
91
91
def decodecopies(files, data):
    """Decode a copies map produced by encodecopies().

    ``files`` is the changeset's file list; ``data`` consists of
    "<index>\\0<source>" entries joined by newlines. Returns a dict
    mapping destination file (looked up in ``files``) to copy source,
    ``{}`` for empty ``data``, and ``None`` when the data cannot be
    parsed (e.g. an extension used the same extra key with different
    syntax).
    """
    try:
        copies = {}
        # an empty value means "no copies"; without this guard,
        # ''.split('\n') yields [''] and the unpack below fails,
        # wrongly turning a valid empty entry into None
        if not data:
            return copies
        for l in data.split('\n'):
            strindex, src = l.split('\0')
            i = int(strindex)
            dst = files[i]
            copies[dst] = src
        return copies
    except (ValueError, IndexError):
        # Perhaps someone had chosen the same key name (e.g. "p1copies") and
        # used different syntax for the value.
        return None
105
107
def encodefileindices(files, subset):
    """Encode ``subset`` (files drawn from ``files``) as a
    newline-separated list of indices into ``files``."""
    wanted = set(subset)
    return '\n'.join('%d' % i
                     for i, f in enumerate(files)
                     if f in wanted)
113
115
def decodefileindices(files, data):
    """Decode a file subset produced by encodefileindices().

    ``data`` is a newline-separated list of indices into ``files``.
    Returns the corresponding file names, ``[]`` for empty ``data``,
    and ``None`` when the data is malformed or an index is out of
    range (e.g. an extension reused the extra key with different
    syntax).
    """
    try:
        subset = []
        # an empty value means an empty subset; without this guard,
        # ''.split('\n') yields [''] and int('') raises, wrongly
        # turning a valid empty entry into None
        if not data:
            return subset
        for strindex in data.split('\n'):
            i = int(strindex)
            if i < 0 or i >= len(files):
                return None
            subset.append(files[i])
        return subset
    except (ValueError, IndexError):
        # Perhaps someone had chosen the same key name (e.g. "added") and
        # used different syntax for the value.
        return None
127
131
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = []
    for line in desc.splitlines():
        cleaned.append(line.rstrip())
    return '\n'.join(cleaned).strip('\n')
131
135
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''
    def __init__(self, vfs, name, mode, buf):
        # buf holds the not-yet-flushed chunks appended via write()
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # virtual read/write position; may point past the real on-disk data
        self.offset = fp.tell()
        # size of the real file; everything at/after this offset lives in
        # self.data rather than on disk
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # total virtual size: real file plus buffered writes
        return self._end
    def tell(self):
        return self.offset
    def flush(self):
        # writes are deliberately held back in self.data; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only reposition the real file when the target lies inside it;
            # positions beyond it are served from self.data in read()
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = ""
        if self.offset < self.size:
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # satisfy the remainder of the read from the buffered data
            doff = self.offset - self.size
            # coalesce the buffered chunks into a single string so it can be
            # sliced; keep the coalesced form to avoid re-joining next time
            self.data.insert(0, "".join(self.data))
            del self.data[1:]
            # NOTE(review): for count < 0 this slice is [doff:doff-1] and
            # drops trailing bytes — callers appear to always pass positive
            # counts when reads reach the buffered region; confirm
            s = self.data[0][doff:doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
197
201
198 def _divertopener(opener, target):
202 def _divertopener(opener, target):
199 """build an opener that writes in 'target.a' instead of 'target'"""
203 """build an opener that writes in 'target.a' instead of 'target'"""
200 def _divert(name, mode='r', checkambig=False):
204 def _divert(name, mode='r', checkambig=False):
201 if name != target:
205 if name != target:
202 return opener(name, mode)
206 return opener(name, mode)
203 return opener(name + ".a", mode)
207 return opener(name + ".a", mode)
204 return _divert
208 return _divert
205
209
def _delayopener(opener, target, buf):
    """build an opener that stores chunks in 'buf' instead of 'target'"""
    def _delay(name, mode='r', checkambig=False):
        # note: checkambig is accepted for interface parity but not forwarded
        if name == target:
            return appender(opener, name, mode, buf)
        return opener(name, mode)
    return _delay
213
217
@attr.s
class _changelogrevision(object):
    # Plain value object for a parsed changelog revision; also returned by
    # changelogrevision() as the default view for an empty revision text.
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node (binary); nullid for the null revision
    manifest = attr.ib(default=nullid)
    user = attr.ib(default='')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default='')
228
232
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
    )

    def __new__(cls, text):
        # an empty text means the null revision: return a plain value object
        # with default fields instead of parsing
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # record the positions of the first three newlines; fields are later
        # extracted lazily by slicing between these offsets
        nl1 = text.index('\n')
        nl2 = text.index('\n', nl1 + 1)
        nl3 = text.index('\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1:nl3 + 2] == '\n':
            doublenl = nl3
        else:
            doublenl = text.index('\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text

        return self

    @property
    def manifest(self):
        # first line: manifest node in hex
        return bin(self._text[0:self._offsets[0]])

    @property
    def user(self):
        # second line: committer
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1:off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the third line: time, timezone
        off = self._offsets
        dateextra = self._text[off[1] + 1:off[2]]
        return dateextra.split(' ', 2)[0:2]

    @property
    def _rawextra(self):
        # optional third field of the third line: encoded extra metadata;
        # None when the entry predates extra (changelog v0)
        off = self._offsets
        dateextra = self._text[off[1] + 1:off[2]]
        fields = dateextra.split(' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        # off[2] == off[3] means the files section is empty (the third
        # newline is the first of the double newline)
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1:off[3]].split('\n')

    @property
    def filesadded(self):
        # note: returns the falsy raw value (None or '') when the extra is
        # missing or empty, rather than decoding it
        rawindices = self.extra.get('filesadded')
        return rawindices and decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        # see filesadded for the falsy-passthrough behavior
        rawindices = self.extra.get('filesremoved')
        return rawindices and decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        # copies relative to the first parent; falsy raw values pass through
        rawcopies = self.extra.get('p1copies')
        return rawcopies and decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        # copies relative to the second parent; falsy raw values pass through
        rawcopies = self.extra.get('p2copies')
        return rawcopies and decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # everything after the double newline
        return encoding.tolocal(self._text[self._offsets[3] + 2:])
353
357
354 class changelog(revlog.revlog):
358 class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists('00changelog.i.a'):
            indexfile = '00changelog.i.a'
        else:
            indexfile = '00changelog.i'

        datafile = '00changelog.d'
        revlog.revlog.__init__(self, opener, indexfile, datafile=datafile,
                               checkambig=True, mmaplargeindex=True)

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state for delayupdate()/_writepending()/_finalize(): writes may be
        # buffered (_delaybuf) or diverted to indexfile + '.a' (_divert)
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from all filtered accessors below
        self.filteredrevs = frozenset()
390
394
391 def tiprev(self):
395 def tiprev(self):
392 for i in pycompat.xrange(len(self) -1, -2, -1):
396 for i in pycompat.xrange(len(self) -1, -2, -1):
393 if i not in self.filteredrevs:
397 if i not in self.filteredrevs:
394 return i
398 return i
395
399
396 def tip(self):
400 def tip(self):
397 """filtered version of revlog.tip"""
401 """filtered version of revlog.tip"""
398 return self.node(self.tiprev())
402 return self.node(self.tiprev())
399
403
400 def __contains__(self, rev):
404 def __contains__(self, rev):
401 """filtered version of revlog.__contains__"""
405 """filtered version of revlog.__contains__"""
402 return (0 <= rev < len(self)
406 return (0 <= rev < len(self)
403 and rev not in self.filteredrevs)
407 and rev not in self.filteredrevs)
404
408
405 def __iter__(self):
409 def __iter__(self):
406 """filtered version of revlog.__iter__"""
410 """filtered version of revlog.__iter__"""
407 if len(self.filteredrevs) == 0:
411 if len(self.filteredrevs) == 0:
408 return revlog.revlog.__iter__(self)
412 return revlog.revlog.__iter__(self)
409
413
410 def filterediter():
414 def filterediter():
411 for i in pycompat.xrange(len(self)):
415 for i in pycompat.xrange(len(self)):
412 if i not in self.filteredrevs:
416 if i not in self.filteredrevs:
413 yield i
417 yield i
414
418
415 return filterediter()
419 return filterediter()
416
420
417 def revs(self, start=0, stop=None):
421 def revs(self, start=0, stop=None):
418 """filtered version of revlog.revs"""
422 """filtered version of revlog.revs"""
419 for i in super(changelog, self).revs(start, stop):
423 for i in super(changelog, self).revs(start, stop):
420 if i not in self.filteredrevs:
424 if i not in self.filteredrevs:
421 yield i
425 yield i
422
426
423 def _checknofilteredinrevs(self, revs):
427 def _checknofilteredinrevs(self, revs):
424 """raise the appropriate error if 'revs' contains a filtered revision
428 """raise the appropriate error if 'revs' contains a filtered revision
425
429
426 This returns a version of 'revs' to be used thereafter by the caller.
430 This returns a version of 'revs' to be used thereafter by the caller.
427 In particular, if revs is an iterator, it is converted into a set.
431 In particular, if revs is an iterator, it is converted into a set.
428 """
432 """
429 safehasattr = util.safehasattr
433 safehasattr = util.safehasattr
430 if safehasattr(revs, '__next__'):
434 if safehasattr(revs, '__next__'):
431 # Note that inspect.isgenerator() is not true for iterators,
435 # Note that inspect.isgenerator() is not true for iterators,
432 revs = set(revs)
436 revs = set(revs)
433
437
434 filteredrevs = self.filteredrevs
438 filteredrevs = self.filteredrevs
435 if safehasattr(revs, 'first'): # smartset
439 if safehasattr(revs, 'first'): # smartset
436 offenders = revs & filteredrevs
440 offenders = revs & filteredrevs
437 else:
441 else:
438 offenders = filteredrevs.intersection(revs)
442 offenders = filteredrevs.intersection(revs)
439
443
440 for rev in offenders:
444 for rev in offenders:
441 raise error.FilteredIndexError(rev)
445 raise error.FilteredIndexError(rev)
442 return revs
446 return revs
443
447
    def headrevs(self, revs=None):
        """Return head revisions, honoring self.filteredrevs when set."""
        if revs is None and self.filteredrevs:
            try:
                # fast path: let the (C) index compute filtered heads
                return self.index.headrevsfiltered(self.filteredrevs)
            # AttributeError covers non-c-extension environments and
            # old c extensions without filter handling.
            except AttributeError:
                return self._headrevs()

        if self.filteredrevs:
            # also converts an iterator 'revs' into a set
            revs = self._checknofilteredinrevs(revs)
        return super(changelog, self).headrevs(revs)
456
460
    def strip(self, *args, **kwargs):
        """Delegate to revlog.strip; only valid on an unfiltered changelog."""
        # XXX make something better than assert
        # We can't expect proper strip behavior if we are filtered.
        assert not self.filteredrevs
        super(changelog, self).strip(*args, **kwargs)
462
466
463 def rev(self, node):
467 def rev(self, node):
464 """filtered version of revlog.rev"""
468 """filtered version of revlog.rev"""
465 r = super(changelog, self).rev(node)
469 r = super(changelog, self).rev(node)
466 if r in self.filteredrevs:
470 if r in self.filteredrevs:
467 raise error.FilteredLookupError(hex(node), self.indexfile,
471 raise error.FilteredLookupError(hex(node), self.indexfile,
468 _('filtered node'))
472 _('filtered node'))
469 return r
473 return r
470
474
471 def node(self, rev):
475 def node(self, rev):
472 """filtered version of revlog.node"""
476 """filtered version of revlog.node"""
473 if rev in self.filteredrevs:
477 if rev in self.filteredrevs:
474 raise error.FilteredIndexError(rev)
478 raise error.FilteredIndexError(rev)
475 return super(changelog, self).node(rev)
479 return super(changelog, self).node(rev)
476
480
477 def linkrev(self, rev):
481 def linkrev(self, rev):
478 """filtered version of revlog.linkrev"""
482 """filtered version of revlog.linkrev"""
479 if rev in self.filteredrevs:
483 if rev in self.filteredrevs:
480 raise error.FilteredIndexError(rev)
484 raise error.FilteredIndexError(rev)
481 return super(changelog, self).linkrev(rev)
485 return super(changelog, self).linkrev(rev)
482
486
483 def parentrevs(self, rev):
487 def parentrevs(self, rev):
484 """filtered version of revlog.parentrevs"""
488 """filtered version of revlog.parentrevs"""
485 if rev in self.filteredrevs:
489 if rev in self.filteredrevs:
486 raise error.FilteredIndexError(rev)
490 raise error.FilteredIndexError(rev)
487 return super(changelog, self).parentrevs(rev)
491 return super(changelog, self).parentrevs(rev)
488
492
489 def flags(self, rev):
493 def flags(self, rev):
490 """filtered version of revlog.flags"""
494 """filtered version of revlog.flags"""
491 if rev in self.filteredrevs:
495 if rev in self.filteredrevs:
492 raise error.FilteredIndexError(rev)
496 raise error.FilteredIndexError(rev)
493 return super(changelog, self).flags(rev)
497 return super(changelog, self).flags(rev)
494
498
    def delayupdate(self, tr):
        "delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert all writes to indexfile + '.a' and
                # rename it into place at finalize time
                self._divert = True
                if self._realopener.exists(self.indexfile + '.a'):
                    # stale pending file from an earlier transaction
                    self._realopener.unlink(self.indexfile + '.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer index writes in memory and
                # append them at finalize time
                self._delaybuf = []
                self.opener = _delayopener(self._realopener, self.indexfile,
                                           self._delaybuf)
        self._delayed = True
        # hook the transaction so pending data is exposed to hooks and the
        # delayed writes land when the transaction completes
        tr.addpending('cl-%i' % id(self), self._writepending)
        tr.addfinalize('cl-%i' % id(self), self._finalize)
511
515
    def _finalize(self, tr):
        "finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            # divert mode buffers nothing in memory
            assert not self._delaybuf
            tmpname = self.indexfile + ".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index
            fp = self.opener(self.indexfile, 'a', checkambig=True)
            fp.write("".join(self._delaybuf))
            fp.close()
        self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
531
535
    def _writepending(self, tr):
        "create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + ".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, "w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write("".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # return value tells the transaction whether a pending file exists
        if self._divert:
            return True

        return False
555
559
556 def _enforceinlinesize(self, tr, fp=None):
560 def _enforceinlinesize(self, tr, fp=None):
557 if not self._delayed:
561 if not self._delayed:
558 revlog.revlog._enforceinlinesize(self, tr, fp)
562 revlog.revlog._enforceinlinesize(self, tr, fp)
559
563
560 def read(self, node):
564 def read(self, node):
561 """Obtain data from a parsed changelog revision.
565 """Obtain data from a parsed changelog revision.
562
566
563 Returns a 6-tuple of:
567 Returns a 6-tuple of:
564
568
565 - manifest node in binary
569 - manifest node in binary
566 - author/user as a localstr
570 - author/user as a localstr
567 - date as a 2-tuple of (time, timezone)
571 - date as a 2-tuple of (time, timezone)
568 - list of files
572 - list of files
569 - commit message as a localstr
573 - commit message as a localstr
570 - dict of extra metadata
574 - dict of extra metadata
571
575
572 Unless you need to access all fields, consider calling
576 Unless you need to access all fields, consider calling
573 ``changelogrevision`` instead, as it is faster for partial object
577 ``changelogrevision`` instead, as it is faster for partial object
574 access.
578 access.
575 """
579 """
576 c = changelogrevision(self.revision(node))
580 c = changelogrevision(self.revision(node))
577 return (
581 return (
578 c.manifest,
582 c.manifest,
579 c.user,
583 c.user,
580 c.date,
584 c.date,
581 c.files,
585 c.files,
582 c.description,
586 c.description,
583 c.extra
587 c.extra
584 )
588 )
585
589
586 def changelogrevision(self, nodeorrev):
590 def changelogrevision(self, nodeorrev):
587 """Obtain a ``changelogrevision`` for a node or revision."""
591 """Obtain a ``changelogrevision`` for a node or revision."""
588 return changelogrevision(self.revision(nodeorrev))
592 return changelogrevision(self.revision(nodeorrev))
589
593
590 def readfiles(self, node):
594 def readfiles(self, node):
591 """
595 """
592 short version of read that only returns the files modified by the cset
596 short version of read that only returns the files modified by the cset
593 """
597 """
594 text = self.revision(node)
598 text = self.revision(node)
595 if not text:
599 if not text:
596 return []
600 return []
597 last = text.index("\n\n")
601 last = text.index("\n\n")
598 l = text[:last].split('\n')
602 l = text[:last].split('\n')
599 return l[3:]
603 return l[3:]
600
604
    def add(self, manifest, files, desc, transaction, p1, p2,
            user, date=None, extra=None, p1copies=None, p2copies=None,
            filesadded=None, filesremoved=None):
        """Add a new revision to the changelog and return its node.

        manifest is the binary manifest node; files is the list of files
        touched by the changeset; desc is the commit message; p1/p2 are the
        parent nodes.  date, extra and the copy/file-change metadata
        (p1copies, p2copies, filesadded, filesremoved) are optional — when
        any of the latter four is not None it is encoded into ``extra``.

        Raises error.StorageError for an empty username, a username
        containing a newline, or a reserved branch name.
        """
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_("empty username"))
        if "\n" in user:
            raise error.StorageError(_("username %r contains a newline")
                                     % pycompat.bytestr(user))

        desc = stripdesc(desc)

        if date:
            parseddate = "%d %d" % dateutil.parsedate(date)
        else:
            parseddate = "%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get("branch")
            # "default" is implicit, so drop it from extra
            if branch in ("default", ""):
                del extra["branch"]
            elif branch in (".", "null", "tip"):
                raise error.StorageError(_('the name \'%s\' is reserved')
                                         % branch)
        # Even when the caller passed extra=None (or an extra that became
        # empty above), copy/file-change metadata still needs a dict to be
        # recorded in — create one on demand.
        extrasentries = p1copies, p2copies, filesadded, filesremoved
        if extra is None and any(x is not None for x in extrasentries):
            extra = {}
        sortedfiles = sorted(files)
        # Copy and file-change info is encoded relative to the sorted file
        # list, which is also the order the files appear in the entry text.
        if p1copies is not None:
            extra['p1copies'] = encodecopies(sortedfiles, p1copies)
        if p2copies is not None:
            extra['p2copies'] = encodecopies(sortedfiles, p2copies)
        if filesadded is not None:
            extra['filesadded'] = encodefileindices(sortedfiles, filesadded)
        if filesremoved is not None:
            extra['filesremoved'] = encodefileindices(sortedfiles, filesremoved)

        if extra:
            # extra is appended to the (escaped) date field of the entry
            extra = encodeextra(extra)
            parseddate = "%s %s" % (parseddate, extra)
        # changelog entry format: manifest hex, user, date[+extra], the
        # touched files (one per line), a blank line, then the description
        l = [hex(manifest), user, parseddate] + sortedfiles + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, len(self), p1, p2)
651
655
652 def branchinfo(self, rev):
656 def branchinfo(self, rev):
653 """return the branch name and open/close state of a revision
657 """return the branch name and open/close state of a revision
654
658
655 This function exists because creating a changectx object
659 This function exists because creating a changectx object
656 just to access this is costly."""
660 just to access this is costly."""
657 extra = self.read(rev)[5]
661 extra = self.read(rev)[5]
658 return encoding.tolocal(extra.get("branch")), 'close' in extra
662 return encoding.tolocal(extra.get("branch")), 'close' in extra
659
663
660 def _nodeduplicatecallback(self, transaction, node):
664 def _nodeduplicatecallback(self, transaction, node):
661 # keep track of revisions that got "re-added", eg: unbunde of know rev.
665 # keep track of revisions that got "re-added", eg: unbunde of know rev.
662 #
666 #
663 # We track them in a list to preserve their order from the source bundle
667 # We track them in a list to preserve their order from the source bundle
664 duplicates = transaction.changes.setdefault('revduplicates', [])
668 duplicates = transaction.changes.setdefault('revduplicates', [])
665 duplicates.append(self.rev(node))
669 duplicates.append(self.rev(node))
General Comments 0
You need to be logged in to leave comments. Login now