##// END OF EJS Templates
repoview: move changelog.__contains__() override to filteredchangelog...
Martin von Zweigbergk -
r43749:c470e699 default
parent child Browse files
Show More
@@ -1,720 +1,716 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 util,
24 util,
25 )
25 )
26 from .utils import (
26 from .utils import (
27 dateutil,
27 dateutil,
28 stringutil,
28 stringutil,
29 )
29 )
30
30
31 from .revlogutils import sidedata as sidedatamod
31 from .revlogutils import sidedata as sidedatamod
32
32
33 _defaultextra = {b'branch': b'default'}
33 _defaultextra = {b'branch': b'default'}
34
34
35
35
36 def _string_escape(text):
36 def _string_escape(text):
37 """
37 """
38 >>> from .pycompat import bytechr as chr
38 >>> from .pycompat import bytechr as chr
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 >>> s
41 >>> s
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 >>> res = _string_escape(s)
43 >>> res = _string_escape(s)
44 >>> s == _string_unescape(res)
44 >>> s == _string_unescape(res)
45 True
45 True
46 """
46 """
47 # subset of the string_escape codec
47 # subset of the string_escape codec
48 text = (
48 text = (
49 text.replace(b'\\', b'\\\\')
49 text.replace(b'\\', b'\\\\')
50 .replace(b'\n', b'\\n')
50 .replace(b'\n', b'\\n')
51 .replace(b'\r', b'\\r')
51 .replace(b'\r', b'\\r')
52 )
52 )
53 return text.replace(b'\0', b'\\0')
53 return text.replace(b'\0', b'\\0')
54
54
55
55
def _string_unescape(text):
    """Reverse :func:`_string_escape`, delegating the final pass to
    ``stringutil.unescapestr``."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag every
        # escaped backslash with a newline marker so the NUL substitution
        # cannot match inside b'\\0', then drop the markers again
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
63
63
64
64
def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # start from the defaults so b'branch' is always present
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
82
82
83
83
def encodeextra(d):
    """Serialize an extra dict into the NUL-separated changelog form."""
    # keys must be sorted to produce a deterministic changelog entry
    escaped = []
    for key in sorted(d):
        pair = b'%s:%s' % (key, pycompat.bytestr(d[key]))
        escaped.append(_string_escape(pair))
    return b"\0".join(escaped)
91
91
92
92
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    trimmed = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(trimmed).strip(b'\n')
96
96
97
97
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # buf: externally owned list collecting the delayed write chunks
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        # size of the real on-disk file; offsets beyond it address self.data
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus everything buffered so far
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are deliberately buffered; nothing reaches disk here
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        # only reposition the real file when the target lies inside it
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # serve as much as possible from the real file first
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # remainder comes from the in-memory buffer: collapse the
            # chunk list into a single chunk so it can be sliced directly
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # never touches the real file; chunks accumulate in self.data
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
166
166
167
167
168 def _divertopener(opener, target):
168 def _divertopener(opener, target):
169 """build an opener that writes in 'target.a' instead of 'target'"""
169 """build an opener that writes in 'target.a' instead of 'target'"""
170
170
171 def _divert(name, mode=b'r', checkambig=False):
171 def _divert(name, mode=b'r', checkambig=False):
172 if name != target:
172 if name != target:
173 return opener(name, mode)
173 return opener(name, mode)
174 return opener(name + b".a", mode)
174 return opener(name + b".a", mode)
175
175
176 return _divert
176 return _divert
177
177
178
178
def _delayopener(opener, target, buf):
    """build an opener that stores chunks in 'buf' instead of 'target'"""

    def _delay(name, mode=b'r', checkambig=False):
        # the target file is wrapped in an appender backed by 'buf';
        # any other file opens normally
        if name == target:
            return appender(opener, name, mode, buf)
        return opener(name, mode)

    return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    # Value object mirroring changelogrevision's attribute surface; used
    # as the all-defaults stand-in when the revision text is empty.
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node (binary), defaults to the null node
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    # copy-tracing fields: None means "not recorded"
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    # slots keep per-revision instances small; only newline offsets are
    # computed eagerly, every field is sliced out of _text on demand
    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # empty revision text: return the all-defaults stand-in
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # cpsd: True when copy metadata lives in sidedata instead of extra
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the date line: time, timezone
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            # no extra field on this (older-format) revision
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        if off[2] == off[3]:
            # no files section: date line runs straight into the blank line
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # +2 skips the double newline separating files from the description
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
362
363
363
364 class changelog(revlog.revlog):
364 class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

            # Delta chains for changelogs tend to be very small because entries
            # tend to be small and don't delta well with each. So disable delta
            # chains.
            self._storedeltachains = False

        self._realopener = opener
        # delayed-write state, driven by delayupdate()/_writepending()/
        # _finalize() below
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from this view of the changelog (set by repoview)
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
407
407
408 def __contains__(self, rev):
409 """filtered version of revlog.__contains__"""
410 return 0 <= rev < len(self) and rev not in self.filteredrevs
411
412 def __iter__(self):
408 def __iter__(self):
413 """filtered version of revlog.__iter__"""
409 """filtered version of revlog.__iter__"""
414 if len(self.filteredrevs) == 0:
410 if len(self.filteredrevs) == 0:
415 return revlog.revlog.__iter__(self)
411 return revlog.revlog.__iter__(self)
416
412
417 def filterediter():
413 def filterediter():
418 for i in pycompat.xrange(len(self)):
414 for i in pycompat.xrange(len(self)):
419 if i not in self.filteredrevs:
415 if i not in self.filteredrevs:
420 yield i
416 yield i
421
417
422 return filterediter()
418 return filterediter()
423
419
424 def revs(self, start=0, stop=None):
420 def revs(self, start=0, stop=None):
425 """filtered version of revlog.revs"""
421 """filtered version of revlog.revs"""
426 for i in super(changelog, self).revs(start, stop):
422 for i in super(changelog, self).revs(start, stop):
427 if i not in self.filteredrevs:
423 if i not in self.filteredrevs:
428 yield i
424 yield i
429
425
430 def _checknofilteredinrevs(self, revs):
426 def _checknofilteredinrevs(self, revs):
431 """raise the appropriate error if 'revs' contains a filtered revision
427 """raise the appropriate error if 'revs' contains a filtered revision
432
428
433 This returns a version of 'revs' to be used thereafter by the caller.
429 This returns a version of 'revs' to be used thereafter by the caller.
434 In particular, if revs is an iterator, it is converted into a set.
430 In particular, if revs is an iterator, it is converted into a set.
435 """
431 """
436 safehasattr = util.safehasattr
432 safehasattr = util.safehasattr
437 if safehasattr(revs, '__next__'):
433 if safehasattr(revs, '__next__'):
438 # Note that inspect.isgenerator() is not true for iterators,
434 # Note that inspect.isgenerator() is not true for iterators,
439 revs = set(revs)
435 revs = set(revs)
440
436
441 filteredrevs = self.filteredrevs
437 filteredrevs = self.filteredrevs
442 if safehasattr(revs, 'first'): # smartset
438 if safehasattr(revs, 'first'): # smartset
443 offenders = revs & filteredrevs
439 offenders = revs & filteredrevs
444 else:
440 else:
445 offenders = filteredrevs.intersection(revs)
441 offenders = filteredrevs.intersection(revs)
446
442
447 for rev in offenders:
443 for rev in offenders:
448 raise error.FilteredIndexError(rev)
444 raise error.FilteredIndexError(rev)
449 return revs
445 return revs
450
446
    def headrevs(self, revs=None):
        """filtered version of revlog.headrevs

        With no ``revs`` and an active filter, computes heads of the
        visible graph; otherwise validates that ``revs`` contains no
        filtered revision before delegating to revlog.
        """
        if revs is None and self.filteredrevs:
            try:
                # fast path: the C index can compute filtered heads itself
                return self.index.headrevsfiltered(self.filteredrevs)
            # AttributeError covers non-c-extension environments and
            # old c extensions without filter handling.
            except AttributeError:
                return self._headrevs()

        if self.filteredrevs:
            revs = self._checknofilteredinrevs(revs)
        return super(changelog, self).headrevs(revs)
463
459
464 def strip(self, *args, **kwargs):
460 def strip(self, *args, **kwargs):
465 # XXX make something better than assert
461 # XXX make something better than assert
466 # We can't expect proper strip behavior if we are filtered.
462 # We can't expect proper strip behavior if we are filtered.
467 assert not self.filteredrevs
463 assert not self.filteredrevs
468 super(changelog, self).strip(*args, **kwargs)
464 super(changelog, self).strip(*args, **kwargs)
469
465
470 def rev(self, node):
466 def rev(self, node):
471 """filtered version of revlog.rev"""
467 """filtered version of revlog.rev"""
472 r = super(changelog, self).rev(node)
468 r = super(changelog, self).rev(node)
473 if r in self.filteredrevs:
469 if r in self.filteredrevs:
474 raise error.FilteredLookupError(
470 raise error.FilteredLookupError(
475 hex(node), self.indexfile, _(b'filtered node')
471 hex(node), self.indexfile, _(b'filtered node')
476 )
472 )
477 return r
473 return r
478
474
479 def node(self, rev):
475 def node(self, rev):
480 """filtered version of revlog.node"""
476 """filtered version of revlog.node"""
481 if rev in self.filteredrevs:
477 if rev in self.filteredrevs:
482 raise error.FilteredIndexError(rev)
478 raise error.FilteredIndexError(rev)
483 return super(changelog, self).node(rev)
479 return super(changelog, self).node(rev)
484
480
485 def linkrev(self, rev):
481 def linkrev(self, rev):
486 """filtered version of revlog.linkrev"""
482 """filtered version of revlog.linkrev"""
487 if rev in self.filteredrevs:
483 if rev in self.filteredrevs:
488 raise error.FilteredIndexError(rev)
484 raise error.FilteredIndexError(rev)
489 return super(changelog, self).linkrev(rev)
485 return super(changelog, self).linkrev(rev)
490
486
491 def parentrevs(self, rev):
487 def parentrevs(self, rev):
492 """filtered version of revlog.parentrevs"""
488 """filtered version of revlog.parentrevs"""
493 if rev in self.filteredrevs:
489 if rev in self.filteredrevs:
494 raise error.FilteredIndexError(rev)
490 raise error.FilteredIndexError(rev)
495 return super(changelog, self).parentrevs(rev)
491 return super(changelog, self).parentrevs(rev)
496
492
497 def flags(self, rev):
493 def flags(self, rev):
498 """filtered version of revlog.flags"""
494 """filtered version of revlog.flags"""
499 if rev in self.filteredrevs:
495 if rev in self.filteredrevs:
500 raise error.FilteredIndexError(rev)
496 raise error.FilteredIndexError(rev)
501 return super(changelog, self).flags(rev)
497 return super(changelog, self).flags(rev)
502
498
    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # empty changelog so far: divert all writes to a side
                # file (indexfile + '.a') that _finalize() renames into
                # place atomically
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    # stale pending file from an earlier run; discard it
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # existing data: buffer new index writes in memory instead
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        # hook the transaction so pending data is exposed/finalized later
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
520
516
    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            # open/close first so a missing pending file raises here
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
540
536
    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # True tells the transaction that a '.a' pending file now exists
        # for readers (e.g. pretxnchangegroup hooks) to consult
        if self._divert:
            return True

        return False
564
560
565 def _enforceinlinesize(self, tr, fp=None):
561 def _enforceinlinesize(self, tr, fp=None):
566 if not self._delayed:
562 if not self._delayed:
567 revlog.revlog._enforceinlinesize(self, tr, fp)
563 revlog.revlog._enforceinlinesize(self, tr, fp)
568
564
def read(self, node):
    """Obtain data from a parsed changelog revision.

    Returns a 6-tuple of:

    - manifest node in binary
    - author/user as a localstr
    - date as a 2-tuple of (time, timezone)
    - list of files
    - commit message as a localstr
    - dict of extra metadata

    Unless you need to access all fields, consider calling
    ``changelogrevision`` instead, as it is faster for partial object
    access.
    """
    text, sidedata = self._revisiondata(node)
    copiesinsidedata = self._copiesstorage == b'changeset-sidedata'
    parsed = changelogrevision(text, sidedata, copiesinsidedata)
    return (
        parsed.manifest,
        parsed.user,
        parsed.date,
        parsed.files,
        parsed.description,
        parsed.extra,
    )
590
586
def changelogrevision(self, nodeorrev):
    """Obtain a ``changelogrevision`` for a node or revision."""
    data, sidedata = self._revisiondata(nodeorrev)
    copiesinsidedata = self._copiesstorage == b'changeset-sidedata'
    return changelogrevision(data, sidedata, copiesinsidedata)
597
593
def readfiles(self, node):
    """Return only the list of files modified by changeset ``node``.

    Short version of ``read()`` that stops parsing at the end of the
    header block (everything before the first blank line).
    """
    text = self.revision(node)
    if not text:
        return []
    headerend = text.index(b"\n\n")
    headerlines = text[:headerend].split(b'\n')
    # header layout: manifest hash, user, date, then the file list
    return headerlines[3:]
608
604
def add(
    self,
    manifest,
    files,
    desc,
    transaction,
    p1,
    p2,
    user,
    date=None,
    extra=None,
    p1copies=None,
    p2copies=None,
    filesadded=None,
    filesremoved=None,
):
    """Add a changeset revision built from the given fields.

    ``manifest`` is the binary manifest node, ``files`` the files touched
    by the changeset, ``desc`` the commit message, ``p1``/``p2`` the
    parent nodes and ``user``/``date`` the author information.  Copy
    metadata (``p1copies``, ``p2copies``, ``filesadded``,
    ``filesremoved``) is encoded into ``extra`` or into sidedata
    depending on ``self._copiesstorage``.

    Note: a caller-supplied ``extra`` dict is mutated in place (reserved
    copy-metadata keys are stripped, ``branch`` may be removed).

    Raises ``error.StorageError`` for an empty or multi-line username and
    for reserved branch names.  Returns the result of
    ``self.addrevision()`` on the encoded changeset text.
    """
    # Convert to UTF-8 encoded bytestrings as the very first
    # thing: calling any method on a localstr object will turn it
    # into a str object and the cached UTF-8 string is thus lost.
    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    user = user.strip()
    # An empty username or a username with a "\n" will make the
    # revision text contain two "\n\n" sequences -> corrupt
    # repository since read cannot unpack the revision.
    if not user:
        raise error.StorageError(_(b"empty username"))
    if b"\n" in user:
        raise error.StorageError(
            _(b"username %r contains a newline") % pycompat.bytestr(user)
        )

    desc = stripdesc(desc)

    if date:
        parseddate = b"%d %d" % dateutil.parsedate(date)
    else:
        parseddate = b"%d %d" % dateutil.makedate()
    if extra:
        branch = extra.get(b"branch")
        # "default" is never stored explicitly; reserved names are refused
        if branch in (b"default", b""):
            del extra[b"branch"]
        elif branch in (b".", b"null", b"tip"):
            raise error.StorageError(
                _(b'the name \'%s\' is reserved') % branch
            )
    sortedfiles = sorted(files)
    sidedata = None
    if extra is not None:
        # copy metadata may only be carried by the mechanism selected by
        # self._copiesstorage below, never by caller-provided extra keys
        for name in (
            b'p1copies',
            b'p2copies',
            b'filesadded',
            b'filesremoved',
        ):
            extra.pop(name, None)
    if p1copies is not None:
        p1copies = copies.encodecopies(sortedfiles, p1copies)
    if p2copies is not None:
        p2copies = copies.encodecopies(sortedfiles, p2copies)
    if filesadded is not None:
        filesadded = copies.encodefileindices(sortedfiles, filesadded)
    if filesremoved is not None:
        filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
    if self._copiesstorage == b'extra':
        extrasentries = p1copies, p2copies, filesadded, filesremoved
        if extra is None and any(x is not None for x in extrasentries):
            extra = {}
        if p1copies is not None:
            extra[b'p1copies'] = p1copies
        if p2copies is not None:
            extra[b'p2copies'] = p2copies
        if filesadded is not None:
            extra[b'filesadded'] = filesadded
        if filesremoved is not None:
            extra[b'filesremoved'] = filesremoved
    elif self._copiesstorage == b'changeset-sidedata':
        sidedata = {}
        if p1copies:
            sidedata[sidedatamod.SD_P1COPIES] = p1copies
        if p2copies:
            sidedata[sidedatamod.SD_P2COPIES] = p2copies
        if filesadded:
            sidedata[sidedatamod.SD_FILESADDED] = filesadded
        if filesremoved:
            sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
        if not sidedata:
            sidedata = None

    if extra:
        extra = encodeextra(extra)
        parseddate = b"%s %s" % (parseddate, extra)
    # changeset text layout: manifest, user, date[+extra], files, blank
    # line, description -- the format readfiles()/read() parse back
    l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
    text = b"\n".join(l)
    return self.addrevision(
        text, transaction, len(self), p1, p2, sidedata=sidedata
    )
706
702
def branchinfo(self, rev):
    """Return ``(branchname, closed)`` for revision ``rev``.

    This function exists because creating a changectx object just to
    access this is costly.
    """
    extra = self.read(rev)[5]
    branch = encoding.tolocal(extra.get(b"branch"))
    isclosed = b'close' in extra
    return branch, isclosed
714
710
715 def _nodeduplicatecallback(self, transaction, node):
711 def _nodeduplicatecallback(self, transaction, node):
716 # keep track of revisions that got "re-added", eg: unbunde of know rev.
712 # keep track of revisions that got "re-added", eg: unbunde of know rev.
717 #
713 #
718 # We track them in a list to preserve their order from the source bundle
714 # We track them in a list to preserve their order from the source bundle
719 duplicates = transaction.changes.setdefault(b'revduplicates', [])
715 duplicates = transaction.changes.setdefault(b'revduplicates', [])
720 duplicates.append(self.rev(node))
716 duplicates.append(self.rev(node))
@@ -1,352 +1,356 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .node import nullrev
15 from .pycompat import (
15 from .pycompat import (
16 delattr,
16 delattr,
17 getattr,
17 getattr,
18 setattr,
18 setattr,
19 )
19 )
20 from . import (
20 from . import (
21 obsolete,
21 obsolete,
22 phases,
22 phases,
23 pycompat,
23 pycompat,
24 tags as tagsmod,
24 tags as tagsmod,
25 util,
25 util,
26 )
26 )
27 from .utils import repoviewutil
27 from .utils import repoviewutil
28
28
29
29
def hideablerevs(repo):
    """Return the candidate revisions for hiding.

    Kept as a standalone function so extensions can wrap it.

    Because the set of immutable changesets is used as a fallback subset
    in branchmap (see mercurial.utils.repoviewutil.subsettable), "public"
    changesets must never be reported as hideable: doing so would break
    multiple code assertions and lead to crashes.
    """
    internals = frozenset(
        repo._phasecache.getrevset(repo, phases.localhiddenphases)
    )
    return obsolete.getrevs(repo, b'obsolete') | internals
42 return obsoletes | internals
43
43
44
44
def pinnedrevs(repo):
    """Return the revisions that block hidden changesets from being
    filtered: working-copy parents, bookmarks and local tags.
    """
    cl = repo.changelog
    pinned = {p.rev() for p in repo[None].parents()}
    pinned.update(cl.rev(node) for node in repo._bookmarks.values())

    localtags = {}
    tagsmod.readlocaltags(repo.ui, repo, localtags, {})
    if localtags:
        nodemap = cl.nodemap
        for tagged in localtags.values():
            if tagged[0] in nodemap:
                pinned.add(cl.rev(tagged[0]))
    return pinned
59 return pinned
60
60
61
61
def _revealancestors(pfunc, hidden, revs):
    """Remove from ``hidden`` every contiguous chain of hidden ancestors
    of ``revs``.

    - pfunc(r): a function returning the parents of ``r``,
    - hidden: the (preliminary) hidden revisions, updated in place,
    - revs: iterable of revnums,

    Ancestors are revealed exclusively: the elements of ``revs``
    themselves are *not* revealed.
    """
    pending = list(revs)
    while pending:
        rev = pending.pop()
        for parent in pfunc(rev):
            if parent == nullrev:
                continue
            if parent in hidden:
                hidden.remove(parent)
                pending.append(parent)
79
79
80
80
def computehidden(repo, visibilityexceptions=None):
    """Compute the set of hidden revisions to filter.

    During most operations hidden revisions should be filtered out.
    """
    assert not repo.changelog.filteredrevs

    hidden = hideablerevs(repo)
    if not hidden:
        return frozenset(hidden)

    hidden = set(hidden - pinnedrevs(repo))
    if visibilityexceptions:
        hidden -= visibilityexceptions
    # a hidden revision with a visible (mutable) descendant must stay
    # visible: reveal the whole ancestor chains of the visible set
    mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
    _revealancestors(repo.changelog.parentrevs, hidden, mutable - hidden)
    return frozenset(hidden)
97 return frozenset(hidden)
98
98
99
99
def computesecret(repo, visibilityexceptions=None):
    """Compute the set of revisions that can never be exposed through
    hgweb.

    Changesets in the secret phase (or above) should stay inaccessible.
    """
    assert not repo.changelog.filteredrevs
    return frozenset(
        repo._phasecache.getrevset(repo, phases.remotehiddenphases)
    )
106 return frozenset(secrets)
107
107
108
108
def computeunserved(repo, visibilityexceptions=None):
    """Compute the set of revisions to filter when acting as a server.

    Secret and hidden changesets should not pretend to be here.
    """
    assert not repo.changelog.filteredrevs
    hiddens = filterrevs(repo, b'visible')
    secrets = filterrevs(repo, b'served.hidden')
    # fast path: reuse the 'visible' frozenset when nothing is secret
    if not secrets:
        return hiddens
    return frozenset(hiddens | secrets)
120 return hiddens
121
121
122
122
def computemutable(repo, visibilityexceptions=None):
    """Compute the set of non-public revisions (filtered from the
    'immutable' view)."""
    assert not repo.changelog.filteredrevs
    # cheap check first: avoid a revset call on huge repos with no
    # non-public phase roots at all
    if not any(repo._phasecache.phaseroots[1:]):
        return frozenset()
    getphase = repo._phasecache.phase
    maymutable = filterrevs(repo, b'base')
    return frozenset(r for r in maymutable if getphase(repo, r))
130 return frozenset()
131
131
132
132
def computeimpactable(repo, visibilityexceptions=None):
    """Everything impactable by a mutable revision.

    The immutable filter still has some chance of being invalidated:

    - hidden changesets get garbage collected,
    - a public phase is moved backward,
    - something changes in the filtering itself (this could be fixed)

    So filter out every mutable changeset plus every public changeset
    that might be impacted by something happening to a mutable revision:
    everything whose revision number is equal to or higher than the first
    mutable changeset.
    """
    assert not repo.changelog.filteredrevs
    cl = repo.changelog
    rootrevs = [
        min(cl.rev(r) for r in roots)
        for roots in repo._phasecache.phaseroots[1:]
        if roots
    ]
    firstmutable = min(rootrevs + [len(cl)])
    # protect against a nullrev root
    firstmutable = max(0, firstmutable)
    return frozenset(pycompat.xrange(firstmutable, len(cl)))
156 return frozenset(pycompat.xrange(firstmutable, len(cl)))
157
157
158
158
# function to compute filtered set
#
# When adding a new filter you MUST update the table at:
#    mercurial.utils.repoviewutil.subsettable
# Otherwise your filter will have to recompute all its branches cache
# from scratch (very slow).
filtertable = {
    b'visible': computehidden,
    b'visible-hidden': computehidden,
    b'served.hidden': computesecret,
    b'served': computeunserved,
    b'immutable': computemutable,
    b'base': computeimpactable,
}

# snapshot of the built-in filter names, taken before extrafilter() can
# register derived ("combined") entries into filtertable
_basefiltername = list(filtertable)
175
176
176
def extrafilter(ui):
    """initialize extra filter and return its id

    If extra filtering is configured, we make sure the associated filtered
    views are declared and return the associated id.  Returns None when no
    ``experimental.extra-filter-revs`` config is set.
    """
    frevs = ui.config(b'experimental', b'extra-filter-revs')
    if frevs is None:
        return None

    # derive a stable 12-char id from the configured revset text
    fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]

    combine = lambda fname: fname + b'%' + fid

    subsettable = repoviewutil.subsettable

    if combine(b'base') not in filtertable:

        def makeextrafilteredrevs(name):
            # Bind ``name`` now, through this factory's argument.  A plain
            # closure over the loop variable would be late-binding: every
            # registered filter would end up delegating to the *last* base
            # filter of the loop instead of its own.
            def extrafilteredrevs(repo, *args, **kwargs):
                baserevs = filtertable[name](repo, *args, **kwargs)
                extrarevs = frozenset(repo.revs(frevs))
                return baserevs | extrarevs

            return extrafilteredrevs

        for name in _basefiltername:
            filtertable[combine(name)] = makeextrafilteredrevs(name)
            if name in subsettable:
                subsettable[combine(name)] = combine(subsettable[name])
    return fid
204 return fid
205
205
206
206
def filterrevs(repo, filtername, visibilityexceptions=None):
    """Return the set of filtered revisions for this filter name.

    ``visibilityexceptions`` is a set of revs which are exceptions to the
    hidden state and must remain visible.  They are dynamic, so their
    result must not be cached.
    """
    if filtername not in repo.filteredrevcache:
        func = filtertable[filtername]
        if visibilityexceptions:
            # dynamic exceptions: compute without caching.  Note that the
            # filter function expects the *unfiltered repo*, not the bound
            # method, hence the call parentheses.
            return func(repo.unfiltered(), visibilityexceptions)
        repo.filteredrevcache[filtername] = func(repo.unfiltered())
    return repo.filteredrevcache[filtername]
218 return repo.filteredrevcache[filtername]
219
219
220
220
def wrapchangelog(unfichangelog, filteredrevs):
    """Return a shallow copy of ``unfichangelog`` whose class is swapped
    for a subclass that honors ``filteredrevs`` in revision access."""
    cl = copy.copy(unfichangelog)
    cl.filteredrevs = filteredrevs

    class filteredchangelog(cl.__class__):
        def tiprev(self):
            """filtered version of revlog.tiprev"""
            # scan down to -1 so an entirely-filtered log yields nullrev
            for rev in pycompat.xrange(len(self) - 1, -2, -1):
                if rev not in self.filteredrevs:
                    return rev

        def __contains__(self, rev):
            """filtered version of revlog.__contains__"""
            if rev < 0 or rev >= len(self):
                return False
            return rev not in self.filteredrevs

    cl.__class__ = filteredchangelog

    return cl
238 return cl
235
239
236
240
class repoview(object):
    """Provide a read/write view of a repo through a filtered changelog

    This object is used to access a filtered version of a repository without
    altering the original repository object itself. We can not alter the
    original object for two main reasons:
    - It prevents the use of a repo with multiple filters at the same time. In
      particular when multiple threads are involved.
    - It makes scope of the filtering harder to control.

    This object behaves very closely to the original repository. All attribute
    operations are done on the original repository:
    - An access to `repoview.someattr` actually returns `repo.someattr`,
    - A write to `repoview.someattr` actually sets value of `repo.someattr`,
    - A deletion of `repoview.someattr` actually drops `someattr`
      from `repo.__dict__`.

    The only exception is the `changelog` property. It is overridden to return
    a (surface) copy of `repo.changelog` with some revisions filtered. The
    `filtername` attribute of the view control the revisions that need to be
    filtered. (the fact the changelog is copied is an implementation detail).

    Unlike attributes, this object intercepts all method calls. This means that
    all methods are run on the `repoview` object with the filtered `changelog`
    property. For this purpose the simple `repoview` class must be mixed with
    the actual class of the repository. This ensures that the resulting
    `repoview` object have the very same methods than the repo object. This
    leads to the property below.

        repoview.method() --> repo.__class__.method(repoview)

    The inheritance has to be done dynamically because `repo` can be of any
    subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
    """

    def __init__(self, repo, filtername, visibilityexceptions=None):
        # object.__setattr__ is used throughout because our own
        # __setattr__ forwards everything to the unfiltered repo
        object.__setattr__(self, r'_unfilteredrepo', repo)
        object.__setattr__(self, r'filtername', filtername)
        object.__setattr__(self, r'_clcachekey', None)
        object.__setattr__(self, r'_clcache', None)
        # revs which are exceptions and must not be hidden
        object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)

    # not a propertycache on purpose we shall implement a proper cache later
    @property
    def changelog(self):
        """return a filtered version of the changeset

        this changelog must not be used for writing"""
        # some cache may be implemented later
        unfi = self._unfilteredrepo
        unfichangelog = unfi.changelog
        # bypass call to changelog.method
        unfiindex = unfichangelog.index
        unfilen = len(unfiindex)
        unfinode = unfiindex[unfilen - 1][7]
        with util.timedcm('repo filter for %s', self.filtername):
            revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
        cl = self._clcache
        # cache key: length/tip of the unfiltered index, the filtered set
        # and the delayed-write state; any change invalidates the cache
        newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
        # if cl.index is not unfiindex, unfi.changelog would be
        # recreated, and our clcache refers to garbage object
        if cl is not None and (
            cl.index is not unfiindex or newkey != self._clcachekey
        ):
            cl = None
        # could have been made None by the previous if
        if cl is None:
            cl = wrapchangelog(unfichangelog, revs)
            object.__setattr__(self, r'_clcache', cl)
            object.__setattr__(self, r'_clcachekey', newkey)
        return cl

    def unfiltered(self):
        """Return an unfiltered version of a repo"""
        return self._unfilteredrepo

    def filtered(self, name, visibilityexceptions=None):
        """Return a filtered version of a repository"""
        if name == self.filtername and not visibilityexceptions:
            return self
        return self.unfiltered().filtered(name, visibilityexceptions)

    def __repr__(self):
        return r'<%s:%s %r>' % (
            self.__class__.__name__,
            pycompat.sysstr(self.filtername),
            self.unfiltered(),
        )

    # everything access are forwarded to the proxied repo
    def __getattr__(self, attr):
        return getattr(self._unfilteredrepo, attr)

    def __setattr__(self, attr, value):
        return setattr(self._unfilteredrepo, attr, value)

    def __delattr__(self, attr):
        return delattr(self._unfilteredrepo, attr)
336
340
337
341
338 # Python <3.4 easily leaks types via __mro__. See
342 # Python <3.4 easily leaks types via __mro__. See
339 # https://bugs.python.org/issue17950. We cache dynamically created types
343 # https://bugs.python.org/issue17950. We cache dynamically created types
340 # so they won't be leaked on every invocation of repo.filtered().
344 # so they won't be leaked on every invocation of repo.filtered().
341 _filteredrepotypes = weakref.WeakKeyDictionary()
345 _filteredrepotypes = weakref.WeakKeyDictionary()
342
346
343
347
344 def newtype(base):
348 def newtype(base):
345 """Create a new type with the repoview mixin and the given base class"""
349 """Create a new type with the repoview mixin and the given base class"""
346 if base not in _filteredrepotypes:
350 if base not in _filteredrepotypes:
347
351
348 class filteredrepo(repoview, base):
352 class filteredrepo(repoview, base):
349 pass
353 pass
350
354
351 _filteredrepotypes[base] = filteredrepo
355 _filteredrepotypes[base] = filteredrepo
352 return _filteredrepotypes[base]
356 return _filteredrepotypes[base]
General Comments 0
You need to be logged in to leave comments. Login now