##// END OF EJS Templates
repoview: move changelog.tiprev() override to filteredchangelog...
Martin von Zweigbergk -
r43748:7bc8e49a default
parent child Browse files
Show More
@@ -1,726 +1,720 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 util,
24 util,
25 )
25 )
26 from .utils import (
26 from .utils import (
27 dateutil,
27 dateutil,
28 stringutil,
28 stringutil,
29 )
29 )
30
30
31 from .revlogutils import sidedata as sidedatamod
31 from .revlogutils import sidedata as sidedatamod
32
32
# Default metadata attached to every changelog revision when none is
# recorded; extensions may modify this, so readers copy it (see
# decodeextra) rather than mutating it in place.
_defaultextra = {b'branch': b'default'}
34
34
35
35
36 def _string_escape(text):
36 def _string_escape(text):
37 """
37 """
38 >>> from .pycompat import bytechr as chr
38 >>> from .pycompat import bytechr as chr
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 >>> s
41 >>> s
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 >>> res = _string_escape(s)
43 >>> res = _string_escape(s)
44 >>> s == _string_unescape(res)
44 >>> s == _string_unescape(res)
45 True
45 True
46 """
46 """
47 # subset of the string_escape codec
47 # subset of the string_escape codec
48 text = (
48 text = (
49 text.replace(b'\\', b'\\\\')
49 text.replace(b'\\', b'\\\\')
50 .replace(b'\n', b'\\n')
50 .replace(b'\n', b'\\n')
51 .replace(b'\r', b'\\r')
51 .replace(b'\r', b'\\r')
52 )
52 )
53 return text.replace(b'\0', b'\\0')
53 return text.replace(b'\0', b'\\0')
54
54
55
55
def _string_unescape(text):
    """Reverse ``_string_escape``: decode escaped changelog extra data."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0
        # Mark every escaped backslash with a trailing newline so a
        # literal "\\0" (escaped backslash followed by the digit 0) is
        # not mistaken for an escaped NUL; escaped input cannot contain
        # a raw newline, so the marker is safely stripped afterwards.
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    return stringutil.unescapestr(text)
63
63
64
64
def decodeextra(text):
    """Parse an encoded extra blob into a dict.

    ``text`` is a NUL-separated sequence of escaped ``key:value``
    entries as produced by ``encodeextra``.  The result always carries
    the defaults from ``_defaultextra`` (e.g. the b'default' branch),
    overridden by whatever the blob contains.
    """
    extra = _defaultextra.copy()
    for entry in text.split(b'\0'):
        if not entry:
            continue
        key, value = _string_unescape(entry).split(b':', 1)
        extra[key] = value
    return extra
82
82
83
83
def encodeextra(d):
    """Serialize the extra dict ``d`` into its changelog form.

    Each ``key:value`` pair is escaped and the pairs are joined with
    NUL bytes.
    """
    # keys must be sorted to produce a deterministic changelog entry
    escaped = []
    for key in sorted(d):
        pair = b'%s:%s' % (key, pycompat.bytestr(d[key]))
        escaped.append(_string_escape(pair))
    return b"\0".join(escaped)
91
91
92
92
def stripdesc(desc):
    """Normalize a commit description.

    Trailing whitespace is removed from every line, and empty lines at
    the beginning and end of the text are dropped.
    """
    lines = []
    for line in desc.splitlines():
        lines.append(line.rstrip())
    return b'\n'.join(lines).strip(b'\n')
96
96
97
97
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # buf collects the buffered (not yet on-disk) chunks; it is
        # shared with the caller (see _delayopener).
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        # size of the real on-disk file; offsets past it address buf
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus everything buffered
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are buffered on purpose; nothing reaches disk here
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only positions inside the real file need an actual seek;
            # positions at or past self.size are served from self.data
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # remainder (or all of it) comes from the buffered data;
            # first collapse the chunk list into a single bytes object
            # so it can be sliced at the right virtual offset
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # appends always go to the in-memory buffer, never to disk
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
166
166
167
167
168 def _divertopener(opener, target):
168 def _divertopener(opener, target):
169 """build an opener that writes in 'target.a' instead of 'target'"""
169 """build an opener that writes in 'target.a' instead of 'target'"""
170
170
171 def _divert(name, mode=b'r', checkambig=False):
171 def _divert(name, mode=b'r', checkambig=False):
172 if name != target:
172 if name != target:
173 return opener(name, mode)
173 return opener(name, mode)
174 return opener(name + b".a", mode)
174 return opener(name + b".a", mode)
175
175
176 return _divert
176 return _divert
177
177
178
178
179 def _delayopener(opener, target, buf):
179 def _delayopener(opener, target, buf):
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
181
181
182 def _delay(name, mode=b'r', checkambig=False):
182 def _delay(name, mode=b'r', checkambig=False):
183 if name != target:
183 if name != target:
184 return opener(name, mode)
184 return opener(name, mode)
185 return appender(opener, name, mode, buf)
185 return appender(opener, name, mode, buf)
186
186
187 return _delay
187 return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    # Plain data holder with the same field names as changelogrevision;
    # used for the empty/null revision (see changelogrevision.__new__).
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        # Empty text means the null revision: return a plain data holder
        # carrying only the default extra.
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        # Only the newline offsets are stored; the properties below
        # slice their field out of _text lazily on access.
        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # cpsd: True when copy metadata lives in sidedata instead of the
        # extra dict (the 'changeset-sidedata' copies storage).
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # field before the first newline: manifest node in hex
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the date line: time and tz
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # optional third field of the date line, or None when absent
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        """Return the (time, timezone) 2-tuple of the revision."""
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        # equal offsets mean the files block is empty (nl3 is the first
        # character of the double newline)
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # +2 skips the double newline separating files from description
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
362
363
363
364 class changelog(revlog.revlog):
364 class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # _realopener is kept around so delayupdate()/_finalize() can
        # swap self.opener for a diverting/buffering wrapper and back.
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from this view of the changelog
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
407
407
408 def tiprev(self):
409 """filtered version of revlog.tiprev"""
410 for i in pycompat.xrange(len(self) - 1, -2, -1):
411 if i not in self.filteredrevs:
412 return i
413
414 def __contains__(self, rev):
408 def __contains__(self, rev):
415 """filtered version of revlog.__contains__"""
409 """filtered version of revlog.__contains__"""
416 return 0 <= rev < len(self) and rev not in self.filteredrevs
410 return 0 <= rev < len(self) and rev not in self.filteredrevs
417
411
418 def __iter__(self):
412 def __iter__(self):
419 """filtered version of revlog.__iter__"""
413 """filtered version of revlog.__iter__"""
420 if len(self.filteredrevs) == 0:
414 if len(self.filteredrevs) == 0:
421 return revlog.revlog.__iter__(self)
415 return revlog.revlog.__iter__(self)
422
416
423 def filterediter():
417 def filterediter():
424 for i in pycompat.xrange(len(self)):
418 for i in pycompat.xrange(len(self)):
425 if i not in self.filteredrevs:
419 if i not in self.filteredrevs:
426 yield i
420 yield i
427
421
428 return filterediter()
422 return filterediter()
429
423
430 def revs(self, start=0, stop=None):
424 def revs(self, start=0, stop=None):
431 """filtered version of revlog.revs"""
425 """filtered version of revlog.revs"""
432 for i in super(changelog, self).revs(start, stop):
426 for i in super(changelog, self).revs(start, stop):
433 if i not in self.filteredrevs:
427 if i not in self.filteredrevs:
434 yield i
428 yield i
435
429
436 def _checknofilteredinrevs(self, revs):
430 def _checknofilteredinrevs(self, revs):
437 """raise the appropriate error if 'revs' contains a filtered revision
431 """raise the appropriate error if 'revs' contains a filtered revision
438
432
439 This returns a version of 'revs' to be used thereafter by the caller.
433 This returns a version of 'revs' to be used thereafter by the caller.
440 In particular, if revs is an iterator, it is converted into a set.
434 In particular, if revs is an iterator, it is converted into a set.
441 """
435 """
442 safehasattr = util.safehasattr
436 safehasattr = util.safehasattr
443 if safehasattr(revs, '__next__'):
437 if safehasattr(revs, '__next__'):
444 # Note that inspect.isgenerator() is not true for iterators,
438 # Note that inspect.isgenerator() is not true for iterators,
445 revs = set(revs)
439 revs = set(revs)
446
440
447 filteredrevs = self.filteredrevs
441 filteredrevs = self.filteredrevs
448 if safehasattr(revs, 'first'): # smartset
442 if safehasattr(revs, 'first'): # smartset
449 offenders = revs & filteredrevs
443 offenders = revs & filteredrevs
450 else:
444 else:
451 offenders = filteredrevs.intersection(revs)
445 offenders = filteredrevs.intersection(revs)
452
446
453 for rev in offenders:
447 for rev in offenders:
454 raise error.FilteredIndexError(rev)
448 raise error.FilteredIndexError(rev)
455 return revs
449 return revs
456
450
    def headrevs(self, revs=None):
        """Return the filtered head revisions.

        With no ``revs`` argument, heads of the whole (filtered) repo
        are computed; otherwise heads among ``revs``, which must not
        contain filtered revisions.
        """
        if revs is None and self.filteredrevs:
            # Prefer the C index implementation when it supports
            # filtering; fall back to the pure-Python computation.
            try:
                return self.index.headrevsfiltered(self.filteredrevs)
            # AttributeError covers non-c-extension environments and
            # old c extensions without filter handling.
            except AttributeError:
                return self._headrevs()

        if self.filteredrevs:
            revs = self._checknofilteredinrevs(revs)
        return super(changelog, self).headrevs(revs)
469
463
    def strip(self, *args, **kwargs):
        """Strip revisions; only valid on an unfiltered changelog."""
        # XXX make something better than assert
        # We can't expect proper strip behavior if we are filtered.
        assert not self.filteredrevs
        super(changelog, self).strip(*args, **kwargs)
475
469
476 def rev(self, node):
470 def rev(self, node):
477 """filtered version of revlog.rev"""
471 """filtered version of revlog.rev"""
478 r = super(changelog, self).rev(node)
472 r = super(changelog, self).rev(node)
479 if r in self.filteredrevs:
473 if r in self.filteredrevs:
480 raise error.FilteredLookupError(
474 raise error.FilteredLookupError(
481 hex(node), self.indexfile, _(b'filtered node')
475 hex(node), self.indexfile, _(b'filtered node')
482 )
476 )
483 return r
477 return r
484
478
485 def node(self, rev):
479 def node(self, rev):
486 """filtered version of revlog.node"""
480 """filtered version of revlog.node"""
487 if rev in self.filteredrevs:
481 if rev in self.filteredrevs:
488 raise error.FilteredIndexError(rev)
482 raise error.FilteredIndexError(rev)
489 return super(changelog, self).node(rev)
483 return super(changelog, self).node(rev)
490
484
491 def linkrev(self, rev):
485 def linkrev(self, rev):
492 """filtered version of revlog.linkrev"""
486 """filtered version of revlog.linkrev"""
493 if rev in self.filteredrevs:
487 if rev in self.filteredrevs:
494 raise error.FilteredIndexError(rev)
488 raise error.FilteredIndexError(rev)
495 return super(changelog, self).linkrev(rev)
489 return super(changelog, self).linkrev(rev)
496
490
497 def parentrevs(self, rev):
491 def parentrevs(self, rev):
498 """filtered version of revlog.parentrevs"""
492 """filtered version of revlog.parentrevs"""
499 if rev in self.filteredrevs:
493 if rev in self.filteredrevs:
500 raise error.FilteredIndexError(rev)
494 raise error.FilteredIndexError(rev)
501 return super(changelog, self).parentrevs(rev)
495 return super(changelog, self).parentrevs(rev)
502
496
503 def flags(self, rev):
497 def flags(self, rev):
504 """filtered version of revlog.flags"""
498 """filtered version of revlog.flags"""
505 if rev in self.filteredrevs:
499 if rev in self.filteredrevs:
506 raise error.FilteredIndexError(rev)
500 raise error.FilteredIndexError(rev)
507 return super(changelog, self).flags(rev)
501 return super(changelog, self).flags(rev)
508
502
    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # Empty changelog: writes can simply be diverted to a
                # '00changelog.i.a' file and renamed into place on
                # finalize (see _finalize).
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # Non-empty changelog: buffer appended data in memory
                # until _writepending()/_finalize() flush it.
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        # hook into the transaction so pending data is exposed to hooks
        # and the delay is undone when the transaction closes
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
526
520
    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            # in divert mode everything was already written to the .a
            # file, so nothing can remain in the in-memory buffer
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
546
540
    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # True tells the transaction that a pending file exists and
        # should be exposed to hooks
        if self._divert:
            return True

        return False
570
564
571 def _enforceinlinesize(self, tr, fp=None):
565 def _enforceinlinesize(self, tr, fp=None):
572 if not self._delayed:
566 if not self._delayed:
573 revlog.revlog._enforceinlinesize(self, tr, fp)
567 revlog.revlog._enforceinlinesize(self, tr, fp)
574
568
def read(self, node):
    """Obtain data from a parsed changelog revision.

    Returns a 6-tuple of:

    - manifest node in binary
    - author/user as a localstr
    - date as a 2-tuple of (time, timezone)
    - list of files
    - commit message as a localstr
    - dict of extra metadata

    Unless you need to access all fields, consider calling
    ``changelogrevision`` instead, as it is faster for partial object
    access.
    """
    text, sidedata = self._revisiondata(node)
    sidedatacopies = self._copiesstorage == b'changeset-sidedata'
    parsed = changelogrevision(text, sidedata, sidedatacopies)
    return (
        parsed.manifest,
        parsed.user,
        parsed.date,
        parsed.files,
        parsed.description,
        parsed.extra,
    )
596
590
def changelogrevision(self, nodeorrev):
    """Obtain a ``changelogrevision`` for a node or revision."""
    sidedatacopies = self._copiesstorage == b'changeset-sidedata'
    text, sidedata = self._revisiondata(nodeorrev)
    return changelogrevision(text, sidedata, sidedatacopies)
603
597
def readfiles(self, node):
    """
    short version of read that only returns the files modified by the cset
    """
    text = self.revision(node)
    if not text:
        return []
    # the header (everything before the blank line that separates the
    # description) is: manifest, user, date, then one file per line
    headerend = text.index(b"\n\n")
    headerlines = text[:headerend].split(b'\n')
    return headerlines[3:]
614
608
def add(
    self,
    manifest,
    files,
    desc,
    transaction,
    p1,
    p2,
    user,
    date=None,
    extra=None,
    p1copies=None,
    p2copies=None,
    filesadded=None,
    filesremoved=None,
):
    """Build a changeset text from the given fields and store it.

    The changeset text layout produced below is:
    manifest hex, user, date[+extra] on the first three lines, then the
    sorted file list (one per line), a blank line, and the description.
    Copy/added/removed metadata goes into ``extra`` or into sidedata
    depending on ``self._copiesstorage``.  The result of ``addrevision``
    is returned.
    """
    # Convert to UTF-8 encoded bytestrings as the very first
    # thing: calling any method on a localstr object will turn it
    # into a str object and the cached UTF-8 string is thus lost.
    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    user = user.strip()
    # An empty username or a username with a "\n" will make the
    # revision text contain two "\n\n" sequences -> corrupt
    # repository since read cannot unpack the revision.
    if not user:
        raise error.StorageError(_(b"empty username"))
    if b"\n" in user:
        raise error.StorageError(
            _(b"username %r contains a newline") % pycompat.bytestr(user)
        )

    desc = stripdesc(desc)

    if date:
        parseddate = b"%d %d" % dateutil.parsedate(date)
    else:
        parseddate = b"%d %d" % dateutil.makedate()
    if extra:
        branch = extra.get(b"branch")
        # the default branch is stored implicitly (no "branch" extra)
        if branch in (b"default", b""):
            del extra[b"branch"]
        elif branch in (b".", b"null", b"tip"):
            raise error.StorageError(
                _(b'the name \'%s\' is reserved') % branch
            )
    sortedfiles = sorted(files)
    sidedata = None
    if extra is not None:
        # copy metadata keys are managed below; never trust caller values
        for name in (
            b'p1copies',
            b'p2copies',
            b'filesadded',
            b'filesremoved',
        ):
            extra.pop(name, None)
    if p1copies is not None:
        p1copies = copies.encodecopies(sortedfiles, p1copies)
    if p2copies is not None:
        p2copies = copies.encodecopies(sortedfiles, p2copies)
    if filesadded is not None:
        filesadded = copies.encodefileindices(sortedfiles, filesadded)
    if filesremoved is not None:
        filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
    if self._copiesstorage == b'extra':
        extrasentries = p1copies, p2copies, filesadded, filesremoved
        if extra is None and any(x is not None for x in extrasentries):
            extra = {}
        if p1copies is not None:
            extra[b'p1copies'] = p1copies
        if p2copies is not None:
            extra[b'p2copies'] = p2copies
        if filesadded is not None:
            extra[b'filesadded'] = filesadded
        if filesremoved is not None:
            extra[b'filesremoved'] = filesremoved
    elif self._copiesstorage == b'changeset-sidedata':
        sidedata = {}
        if p1copies:
            sidedata[sidedatamod.SD_P1COPIES] = p1copies
        if p2copies:
            sidedata[sidedatamod.SD_P2COPIES] = p2copies
        if filesadded:
            sidedata[sidedatamod.SD_FILESADDED] = filesadded
        if filesremoved:
            sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
        if not sidedata:
            sidedata = None

    if extra:
        # extras are appended to the (already formatted) date line
        extra = encodeextra(extra)
        parseddate = b"%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
    text = b"\n".join(l)
    return self.addrevision(
        text, transaction, len(self), p1, p2, sidedata=sidedata
    )
712
706
def branchinfo(self, rev):
    """return the branch name and open/close state of a revision

    This function exists because creating a changectx object
    just to access this is costly."""
    # the extra dict is the sixth field of read()'s 6-tuple
    extra = self.read(rev)[5]
    # presence of a b'close' key marks a branch-closing changeset
    return encoding.tolocal(extra.get(b"branch")), b'close' in extra
720
714
def _nodeduplicatecallback(self, transaction, node):
    # keep track of revisions that got "re-added", e.g. unbundle of a
    # known revision
    #
    # We track them in a list to preserve their order from the source bundle
    duplicates = transaction.changes.setdefault(b'revduplicates', [])
    duplicates.append(self.rev(node))
@@ -1,348 +1,352 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .node import nullrev
15 from .pycompat import (
15 from .pycompat import (
16 delattr,
16 delattr,
17 getattr,
17 getattr,
18 setattr,
18 setattr,
19 )
19 )
20 from . import (
20 from . import (
21 obsolete,
21 obsolete,
22 phases,
22 phases,
23 pycompat,
23 pycompat,
24 tags as tagsmod,
24 tags as tagsmod,
25 util,
25 util,
26 )
26 )
27 from .utils import repoviewutil
27 from .utils import repoviewutil
28
28
29
29
def hideablerevs(repo):
    """Revision candidates to be hidden

    This is a standalone function to allow extensions to wrap it.

    Because we use the set of immutable changesets as a fallback subset in
    branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
    "public" changesets as "hideable". Doing so would break multiple code
    assertions and lead to crashes."""
    obsoletes = obsolete.getrevs(repo, b'obsolete')
    # changesets in a local-hidden phase are hideable as well
    internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
    internals = frozenset(internals)
    return obsoletes | internals
43
43
44
44
def pinnedrevs(repo):
    """revisions blocking hidden changesets from being filtered
    """
    cl = repo.changelog
    # working-directory parents and bookmarked revisions are always pinned
    pinned = {par.rev() for par in repo[None].parents()}
    pinned.update(cl.rev(bm) for bm in repo._bookmarks.values())

    # local tags pin their target too (when the target is known)
    tags = {}
    tagsmod.readlocaltags(repo.ui, repo, tags, {})
    if tags:
        rev, nodemap = cl.rev, cl.nodemap
        pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
    return pinned
60
60
61
61
def _revealancestors(pfunc, hidden, revs):
    """reveals contiguous chains of hidden ancestors of 'revs' by removing them
    from 'hidden'

    - pfunc(r): a funtion returning parent of 'r',
    - hidden: the (preliminary) hidden revisions, to be updated
    - revs: iterable of revnum,

    (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
    *not* revealed)
    """
    # iterative depth-first walk over the hidden ancestry
    pending = list(revs)
    while pending:
        current = pending.pop()
        for parent in pfunc(current):
            if parent != nullrev and parent in hidden:
                hidden.remove(parent)
                pending.append(parent)
79
79
80
80
def computehidden(repo, visibilityexceptions=None):
    """compute the set of hidden revision to filter

    During most operation hidden should be filtered."""
    assert not repo.changelog.filteredrevs

    hidden = hideablerevs(repo)
    if hidden:
        hidden = set(hidden - pinnedrevs(repo))
        if visibilityexceptions:
            hidden -= visibilityexceptions
        pfunc = repo.changelog.parentrevs
        mutable = repo._phasecache.getrevset(repo, phases.mutablephases)

        # hidden ancestors of visible mutable changesets must be revealed
        # so the remaining graph stays connected (see _revealancestors)
        visible = mutable - hidden
        _revealancestors(pfunc, hidden, visible)
    return frozenset(hidden)
98
98
99
99
def computesecret(repo, visibilityexceptions=None):
    """compute the set of revision that can never be exposed through hgweb

    Changeset in the secret phase (or above) should stay unaccessible."""
    assert not repo.changelog.filteredrevs
    hiddenphases = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
    return frozenset(hiddenphases)
107
107
108
108
def computeunserved(repo, visibilityexceptions=None):
    """compute the set of revision that should be filtered when used a server

    Secret and hidden changeset should not pretend to be here."""
    assert not repo.changelog.filteredrevs
    # fast path in simple case to avoid impact of non optimised code
    hiddens = filterrevs(repo, b'visible')
    secrets = filterrevs(repo, b'served.hidden')
    if not secrets:
        # nothing secret: reuse the 'visible' set as-is
        return hiddens
    return frozenset(hiddens | secrets)
121
121
122
122
def computemutable(repo, visibilityexceptions=None):
    """compute the set of revisions filtered out of the 'immutable' view"""
    assert not repo.changelog.filteredrevs
    # fast check to avoid revset call on huge repo: no non-public phase
    # roots means nothing is mutable
    if not any(repo._phasecache.phaseroots[1:]):
        return frozenset()
    getphase = repo._phasecache.phase
    maymutable = filterrevs(repo, b'base')
    return frozenset(r for r in maymutable if getphase(repo, r))
131
131
132
132
def computeimpactable(repo, visibilityexceptions=None):
    """Everything impactable by mutable revision

    The immutable filter still has some chance of being invalidated. This
    will happen when:

    - you garbage collect hidden changesets,
    - public phase is moved backward,
    - something is changed in the filtering (this could be fixed)

    This filters out any mutable changeset and any public changeset that may
    be impacted by something happening to a mutable revision.

    This is achieved by filtering everything with a revision number equal
    to or higher than the first mutable changeset."""
    assert not repo.changelog.filteredrevs
    cl = repo.changelog
    firstmutable = len(cl)
    # find the lowest revision among all non-public phase roots
    for roots in repo._phasecache.phaseroots[1:]:
        if roots:
            firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
    # protect from nullrev root
    firstmutable = max(0, firstmutable)
    return frozenset(pycompat.xrange(firstmutable, len(cl)))
157
157
158
158
# function to compute filtered set
#
# When adding a new filter you MUST update the table at:
#    mercurial.utils.repoviewutil.subsettable
# Otherwise your filter will have to recompute all its branches cache
# from scratch (very slow).
filtertable = {
    b'visible': computehidden,
    b'visible-hidden': computehidden,
    b'served.hidden': computesecret,
    b'served': computeunserved,
    b'immutable': computemutable,
    b'base': computeimpactable,
}

# names of the built-in filters above; extrafilter() derives combined
# filter names from this list
_basefiltername = list(filtertable)
175
175
176
176
def extrafilter(ui):
    """initialize extra filter and return its id

    If extra filtering is configured, we make sure the associated filtered
    views are declared and return the associated id.
    """
    frevs = ui.config(b'experimental', b'extra-filter-revs')
    if frevs is None:
        return None

    # filter id: short hash of the configured revset
    fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]

    combine = lambda fname: fname + b'%' + fid

    subsettable = repoviewutil.subsettable

    if combine(b'base') not in filtertable:

        def makefilter(basefiltername):
            # Bind the base filter name at definition time.  Reading the
            # loop variable from the enclosing scope (as the previous code
            # did) is a late-binding closure: every combined filter would
            # dispatch to the *last* base filter iterated over.
            def extrafilteredrevs(repo, *args, **kwargs):
                baserevs = filtertable[basefiltername](repo, *args, **kwargs)
                extrarevs = frozenset(repo.revs(frevs))
                return baserevs | extrarevs

            return extrafilteredrevs

        for name in _basefiltername:
            filtertable[combine(name)] = makefilter(name)
            if name in subsettable:
                subsettable[combine(name)] = combine(subsettable[name])
    return fid
205
205
206
206
def filterrevs(repo, filtername, visibilityexceptions=None):
    """returns set of filtered revisions for this filter name

    visibilityexceptions is a set of revs which are exceptions to the
    hidden state and must remain visible. They are dynamic and hence we
    should not cache the result."""
    if filtername not in repo.filteredrevcache:
        func = filtertable[filtername]
        if visibilityexceptions:
            # pass an actual repo object to the compute function; the
            # previous code passed the bound method ``repo.unfiltered``
            # without calling it
            return func(repo.unfiltered(), visibilityexceptions)
        repo.filteredrevcache[filtername] = func(repo.unfiltered())
    return repo.filteredrevcache[filtername]
219
219
220
220
def wrapchangelog(unfichangelog, filteredrevs):
    # shallow-copy the unfiltered changelog and swap in a dynamically
    # created subclass so the filtered-specific overrides apply to this
    # instance only
    cl = copy.copy(unfichangelog)
    cl.filteredrevs = filteredrevs

    class filteredchangelog(cl.__class__):
        def tiprev(self):
            """filtered version of revlog.tiprev"""
            # scan downward; the range stops at -1 (nullrev), so a fully
            # filtered changelog still returns a valid revision number
            for i in pycompat.xrange(len(self) - 1, -2, -1):
                if i not in self.filteredrevs:
                    return i

    cl.__class__ = filteredchangelog

    return cl
231
235
232
236
233 class repoview(object):
237 class repoview(object):
234 """Provide a read/write view of a repo through a filtered changelog
238 """Provide a read/write view of a repo through a filtered changelog
235
239
236 This object is used to access a filtered version of a repository without
240 This object is used to access a filtered version of a repository without
237 altering the original repository object itself. We can not alter the
241 altering the original repository object itself. We can not alter the
238 original object for two main reasons:
242 original object for two main reasons:
239 - It prevents the use of a repo with multiple filters at the same time. In
243 - It prevents the use of a repo with multiple filters at the same time. In
240 particular when multiple threads are involved.
244 particular when multiple threads are involved.
241 - It makes scope of the filtering harder to control.
245 - It makes scope of the filtering harder to control.
242
246
243 This object behaves very closely to the original repository. All attribute
247 This object behaves very closely to the original repository. All attribute
244 operations are done on the original repository:
248 operations are done on the original repository:
245 - An access to `repoview.someattr` actually returns `repo.someattr`,
249 - An access to `repoview.someattr` actually returns `repo.someattr`,
246 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
250 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
247 - A deletion of `repoview.someattr` actually drops `someattr`
251 - A deletion of `repoview.someattr` actually drops `someattr`
248 from `repo.__dict__`.
252 from `repo.__dict__`.
249
253
250 The only exception is the `changelog` property. It is overridden to return
254 The only exception is the `changelog` property. It is overridden to return
251 a (surface) copy of `repo.changelog` with some revisions filtered. The
255 a (surface) copy of `repo.changelog` with some revisions filtered. The
252 `filtername` attribute of the view control the revisions that need to be
256 `filtername` attribute of the view control the revisions that need to be
253 filtered. (the fact the changelog is copied is an implementation detail).
257 filtered. (the fact the changelog is copied is an implementation detail).
254
258
255 Unlike attributes, this object intercepts all method calls. This means that
259 Unlike attributes, this object intercepts all method calls. This means that
256 all methods are run on the `repoview` object with the filtered `changelog`
260 all methods are run on the `repoview` object with the filtered `changelog`
257 property. For this purpose the simple `repoview` class must be mixed with
261 property. For this purpose the simple `repoview` class must be mixed with
258 the actual class of the repository. This ensures that the resulting
262 the actual class of the repository. This ensures that the resulting
259 `repoview` object have the very same methods than the repo object. This
263 `repoview` object have the very same methods than the repo object. This
260 leads to the property below.
264 leads to the property below.
261
265
262 repoview.method() --> repo.__class__.method(repoview)
266 repoview.method() --> repo.__class__.method(repoview)
263
267
264 The inheritance has to be done dynamically because `repo` can be of any
268 The inheritance has to be done dynamically because `repo` can be of any
265 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
269 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
266 """
270 """
267
271
268 def __init__(self, repo, filtername, visibilityexceptions=None):
272 def __init__(self, repo, filtername, visibilityexceptions=None):
269 object.__setattr__(self, r'_unfilteredrepo', repo)
273 object.__setattr__(self, r'_unfilteredrepo', repo)
270 object.__setattr__(self, r'filtername', filtername)
274 object.__setattr__(self, r'filtername', filtername)
271 object.__setattr__(self, r'_clcachekey', None)
275 object.__setattr__(self, r'_clcachekey', None)
272 object.__setattr__(self, r'_clcache', None)
276 object.__setattr__(self, r'_clcache', None)
273 # revs which are exceptions and must not be hidden
277 # revs which are exceptions and must not be hidden
274 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
278 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
275
279
276 # not a propertycache on purpose we shall implement a proper cache later
280 # not a propertycache on purpose we shall implement a proper cache later
277 @property
281 @property
278 def changelog(self):
282 def changelog(self):
279 """return a filtered version of the changeset
283 """return a filtered version of the changeset
280
284
281 this changelog must not be used for writing"""
285 this changelog must not be used for writing"""
282 # some cache may be implemented later
286 # some cache may be implemented later
283 unfi = self._unfilteredrepo
287 unfi = self._unfilteredrepo
284 unfichangelog = unfi.changelog
288 unfichangelog = unfi.changelog
285 # bypass call to changelog.method
289 # bypass call to changelog.method
286 unfiindex = unfichangelog.index
290 unfiindex = unfichangelog.index
287 unfilen = len(unfiindex)
291 unfilen = len(unfiindex)
288 unfinode = unfiindex[unfilen - 1][7]
292 unfinode = unfiindex[unfilen - 1][7]
289 with util.timedcm('repo filter for %s', self.filtername):
293 with util.timedcm('repo filter for %s', self.filtername):
290 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
294 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
291 cl = self._clcache
295 cl = self._clcache
292 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
296 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
293 # if cl.index is not unfiindex, unfi.changelog would be
297 # if cl.index is not unfiindex, unfi.changelog would be
294 # recreated, and our clcache refers to garbage object
298 # recreated, and our clcache refers to garbage object
295 if cl is not None and (
299 if cl is not None and (
296 cl.index is not unfiindex or newkey != self._clcachekey
300 cl.index is not unfiindex or newkey != self._clcachekey
297 ):
301 ):
298 cl = None
302 cl = None
299 # could have been made None by the previous if
303 # could have been made None by the previous if
300 if cl is None:
304 if cl is None:
301 cl = wrapchangelog(unfichangelog, revs)
305 cl = wrapchangelog(unfichangelog, revs)
302 object.__setattr__(self, r'_clcache', cl)
306 object.__setattr__(self, r'_clcache', cl)
303 object.__setattr__(self, r'_clcachekey', newkey)
307 object.__setattr__(self, r'_clcachekey', newkey)
304 return cl
308 return cl
305
309
306 def unfiltered(self):
310 def unfiltered(self):
307 """Return an unfiltered version of a repo"""
311 """Return an unfiltered version of a repo"""
308 return self._unfilteredrepo
312 return self._unfilteredrepo
309
313
def filtered(self, name, visibilityexceptions=None):
    """Return a filtered version of a repository.

    Reuses ``self`` when the requested filter name matches this view and no
    extra visibility exceptions are requested; otherwise delegates to the
    unfiltered repository so the new view wraps the real repo, not a view.
    """
    if name == self.filtername and not visibilityexceptions:
        return self
    return self.unfiltered().filtered(name, visibilityexceptions)
315
319
def __repr__(self):
    """Return e.g. ``<filteredrepo:visible <repo ...>>`` for debugging.

    ``pycompat.sysstr`` converts the bytes filter name into a native str so
    the representation is readable on Python 3.
    """
    return r'<%s:%s %r>' % (
        self.__class__.__name__,
        pycompat.sysstr(self.filtername),
        self.unfiltered(),
    )
322
326
# All attribute accesses not found on the view are forwarded to the
# proxied (unfiltered) repo.
def __getattr__(self, attr):
    return getattr(self._unfilteredrepo, attr)
326
330
def __setattr__(self, attr, value):
    # Attribute writes go to the proxied repo so all views share state;
    # the view's own fields are set with object.__setattr__ to bypass this.
    return setattr(self._unfilteredrepo, attr, value)
329
333
def __delattr__(self, attr):
    # Attribute deletion is likewise forwarded to the proxied repo.
    return delattr(self._unfilteredrepo, attr)
332
336
333
337
334 # Python <3.4 easily leaks types via __mro__. See
338 # Python <3.4 easily leaks types via __mro__. See
335 # https://bugs.python.org/issue17950. We cache dynamically created types
339 # https://bugs.python.org/issue17950. We cache dynamically created types
336 # so they won't be leaked on every invocation of repo.filtered().
340 # so they won't be leaked on every invocation of repo.filtered().
337 _filteredrepotypes = weakref.WeakKeyDictionary()
341 _filteredrepotypes = weakref.WeakKeyDictionary()
338
342
339
343
340 def newtype(base):
344 def newtype(base):
341 """Create a new type with the repoview mixin and the given base class"""
345 """Create a new type with the repoview mixin and the given base class"""
342 if base not in _filteredrepotypes:
346 if base not in _filteredrepotypes:
343
347
344 class filteredrepo(repoview, base):
348 class filteredrepo(repoview, base):
345 pass
349 pass
346
350
347 _filteredrepotypes[base] = filteredrepo
351 _filteredrepotypes[base] = filteredrepo
348 return _filteredrepotypes[base]
352 return _filteredrepotypes[base]
General Comments 0
You need to be logged in to leave comments. Login now