##// END OF EJS Templates
repoview: move changelog.revs() override to filteredchangelog...
Martin von Zweigbergk -
r43751:5ade4728 default
parent child Browse files
Show More
@@ -1,704 +1,698 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 util,
24 util,
25 )
25 )
26 from .utils import (
26 from .utils import (
27 dateutil,
27 dateutil,
28 stringutil,
28 stringutil,
29 )
29 )
30
30
31 from .revlogutils import sidedata as sidedatamod
31 from .revlogutils import sidedata as sidedatamod
32
32
# Fallback 'extra' mapping for changesets that carry no extra data; the
# branch defaults to 'default'.  Extensions might modify this mapping, so
# consumers copy it before mutating.
_defaultextra = {b'branch': b'default'}
34
34
35
35
36 def _string_escape(text):
36 def _string_escape(text):
37 """
37 """
38 >>> from .pycompat import bytechr as chr
38 >>> from .pycompat import bytechr as chr
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 >>> s
41 >>> s
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 >>> res = _string_escape(s)
43 >>> res = _string_escape(s)
44 >>> s == _string_unescape(res)
44 >>> s == _string_unescape(res)
45 True
45 True
46 """
46 """
47 # subset of the string_escape codec
47 # subset of the string_escape codec
48 text = (
48 text = (
49 text.replace(b'\\', b'\\\\')
49 text.replace(b'\\', b'\\\\')
50 .replace(b'\n', b'\\n')
50 .replace(b'\n', b'\\n')
51 .replace(b'\r', b'\\r')
51 .replace(b'\r', b'\\r')
52 )
52 )
53 return text.replace(b'\0', b'\\0')
53 return text.replace(b'\0', b'\\0')
54
54
55
55
def _string_unescape(text):
    """Reverse _string_escape, delegating most work to unescapestr."""
    if b'\\0' in text:
        # Expand \0 ourselves, taking care not to touch the '0' of an
        # escaped backslash followed by a literal zero (\\0).  A temporary
        # newline marker protects doubled backslashes, since no real
        # newline can appear in escaped text.
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
63
63
64
64
def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for entry in text.split(b'\0'):
        if not entry:
            continue
        key, value = _string_unescape(entry).split(b':', 1)
        extra[key] = value
    return extra
82
82
83
83
def encodeextra(d):
    """Serialize an extra dict into the NUL-separated changelog form."""
    # keys must be sorted to produce a deterministic changelog entry
    pieces = []
    for key in sorted(d):
        pieces.append(
            _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        )
    return b"\0".join(pieces)
91
91
92
92
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    trimmed = (line.rstrip() for line in desc.splitlines())
    return b'\n'.join(trimmed).strip(b'\n')
96
96
97
97
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # pending writes accumulate in 'buf' (a shared list of byte chunks)
        # instead of touching the real file
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # virtual read/write position; may point past the real file end,
        # into the buffered data
        self.offset = fp.tell()
        # size of the real on-disk file; anything beyond it lives in
        # self.data
        self.size = vfs.fstat(fp).st_size
        # virtual end of file: real size plus everything buffered so far
        self._end = self.size

    def end(self):
        # virtual end of file (real file + buffered writes)
        return self._end

    def tell(self):
        # virtual position, not the underlying file's position
        return self.offset

    def flush(self):
        # deliberate no-op: nothing is written to disk until the
        # transaction finalizes
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        # only reposition the real file when the virtual offset falls
        # inside it; offsets past self.size address the buffer instead
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # read what we can from the real file first
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # satisfy the remainder from the in-memory buffer; coalesce
            # the chunk list into a single entry so it can be sliced
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            # NOTE(review): when count == -1 this slices [doff:doff-1],
            # which drops the last buffered byte — callers here appear to
            # always pass explicit counts; confirm before relying on
            # read(-1) across the file/buffer boundary
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # writes only ever append to the buffer, never to the real file
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
166
166
167
167
168 def _divertopener(opener, target):
168 def _divertopener(opener, target):
169 """build an opener that writes in 'target.a' instead of 'target'"""
169 """build an opener that writes in 'target.a' instead of 'target'"""
170
170
171 def _divert(name, mode=b'r', checkambig=False):
171 def _divert(name, mode=b'r', checkambig=False):
172 if name != target:
172 if name != target:
173 return opener(name, mode)
173 return opener(name, mode)
174 return opener(name + b".a", mode)
174 return opener(name + b".a", mode)
175
175
176 return _divert
176 return _divert
177
177
178
178
179 def _delayopener(opener, target, buf):
179 def _delayopener(opener, target, buf):
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
181
181
182 def _delay(name, mode=b'r', checkambig=False):
182 def _delay(name, mode=b'r', checkambig=False):
183 if name != target:
183 if name != target:
184 return opener(name, mode)
184 return opener(name, mode)
185 return appender(opener, name, mode, buf)
185 return appender(opener, name, mode, buf)
186
186
187 return _delay
187 return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    # Plain attrs holder returned by changelogrevision() for the empty-text
    # case; mirrors the changelogrevision property API with default values.
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node (binary)
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    # None means "not recorded" as opposed to "recorded as empty"
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object: each field is decoded lazily from the raw text
    via a property, so partial access stays cheap.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        # empty text: fall back to a default-valued attrs object
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # locate the first three newlines, which delimit the fixed header
        # fields (manifest node, user, date line)
        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # whether copy metadata is stored in sidedata rather than extra
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # first line is the manifest node in hex; convert to binary
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # the "time tz" prefix of the third line, as raw bytes
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # the optional third field of the date line, or None when absent
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        # off[2] == off[3] means no file list at all (empty revision)
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        # returns [] for "recorded as empty", None for "not recorded"
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
            if rawindices is None:
                return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        # returns [] for "recorded as empty", None for "not recorded"
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
            if rawindices is None:
                return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        # returns {} for "recorded as empty", None for "not recorded"
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
            if rawcopies is None:
                return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        # returns {} for "recorded as empty", None for "not recorded"
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
            if rawcopies is None:
                return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # everything after the blank line separating header from comment
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
362
363
363
364 class changelog(revlog.revlog):
364 class changelog(revlog.revlog):
365 def __init__(self, opener, trypending=False):
365 def __init__(self, opener, trypending=False):
366 """Load a changelog revlog using an opener.
366 """Load a changelog revlog using an opener.
367
367
368 If ``trypending`` is true, we attempt to load the index from a
368 If ``trypending`` is true, we attempt to load the index from a
369 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
369 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
370 The ``00changelog.i.a`` file contains index (and possibly inline
370 The ``00changelog.i.a`` file contains index (and possibly inline
371 revision) data for a transaction that hasn't been finalized yet.
371 revision) data for a transaction that hasn't been finalized yet.
372 It exists in a separate file to facilitate readers (such as
372 It exists in a separate file to facilitate readers (such as
373 hooks processes) accessing data before a transaction is finalized.
373 hooks processes) accessing data before a transaction is finalized.
374 """
374 """
375 if trypending and opener.exists(b'00changelog.i.a'):
375 if trypending and opener.exists(b'00changelog.i.a'):
376 indexfile = b'00changelog.i.a'
376 indexfile = b'00changelog.i.a'
377 else:
377 else:
378 indexfile = b'00changelog.i'
378 indexfile = b'00changelog.i'
379
379
380 datafile = b'00changelog.d'
380 datafile = b'00changelog.d'
381 revlog.revlog.__init__(
381 revlog.revlog.__init__(
382 self,
382 self,
383 opener,
383 opener,
384 indexfile,
384 indexfile,
385 datafile=datafile,
385 datafile=datafile,
386 checkambig=True,
386 checkambig=True,
387 mmaplargeindex=True,
387 mmaplargeindex=True,
388 )
388 )
389
389
390 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
390 if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
391 # changelogs don't benefit from generaldelta.
391 # changelogs don't benefit from generaldelta.
392
392
393 self.version &= ~revlog.FLAG_GENERALDELTA
393 self.version &= ~revlog.FLAG_GENERALDELTA
394 self._generaldelta = False
394 self._generaldelta = False
395
395
396 # Delta chains for changelogs tend to be very small because entries
396 # Delta chains for changelogs tend to be very small because entries
397 # tend to be small and don't delta well with each. So disable delta
397 # tend to be small and don't delta well with each. So disable delta
398 # chains.
398 # chains.
399 self._storedeltachains = False
399 self._storedeltachains = False
400
400
401 self._realopener = opener
401 self._realopener = opener
402 self._delayed = False
402 self._delayed = False
403 self._delaybuf = None
403 self._delaybuf = None
404 self._divert = False
404 self._divert = False
405 self.filteredrevs = frozenset()
405 self.filteredrevs = frozenset()
406 self._copiesstorage = opener.options.get(b'copies-storage')
406 self._copiesstorage = opener.options.get(b'copies-storage')
407
407
408 def revs(self, start=0, stop=None):
409 """filtered version of revlog.revs"""
410 for i in super(changelog, self).revs(start, stop):
411 if i not in self.filteredrevs:
412 yield i
413
414 def _checknofilteredinrevs(self, revs):
408 def _checknofilteredinrevs(self, revs):
415 """raise the appropriate error if 'revs' contains a filtered revision
409 """raise the appropriate error if 'revs' contains a filtered revision
416
410
417 This returns a version of 'revs' to be used thereafter by the caller.
411 This returns a version of 'revs' to be used thereafter by the caller.
418 In particular, if revs is an iterator, it is converted into a set.
412 In particular, if revs is an iterator, it is converted into a set.
419 """
413 """
420 safehasattr = util.safehasattr
414 safehasattr = util.safehasattr
421 if safehasattr(revs, '__next__'):
415 if safehasattr(revs, '__next__'):
422 # Note that inspect.isgenerator() is not true for iterators,
416 # Note that inspect.isgenerator() is not true for iterators,
423 revs = set(revs)
417 revs = set(revs)
424
418
425 filteredrevs = self.filteredrevs
419 filteredrevs = self.filteredrevs
426 if safehasattr(revs, 'first'): # smartset
420 if safehasattr(revs, 'first'): # smartset
427 offenders = revs & filteredrevs
421 offenders = revs & filteredrevs
428 else:
422 else:
429 offenders = filteredrevs.intersection(revs)
423 offenders = filteredrevs.intersection(revs)
430
424
431 for rev in offenders:
425 for rev in offenders:
432 raise error.FilteredIndexError(rev)
426 raise error.FilteredIndexError(rev)
433 return revs
427 return revs
434
428
435 def headrevs(self, revs=None):
429 def headrevs(self, revs=None):
436 if revs is None and self.filteredrevs:
430 if revs is None and self.filteredrevs:
437 try:
431 try:
438 return self.index.headrevsfiltered(self.filteredrevs)
432 return self.index.headrevsfiltered(self.filteredrevs)
439 # AttributeError covers non-c-extension environments and
433 # AttributeError covers non-c-extension environments and
440 # old c extensions without filter handling.
434 # old c extensions without filter handling.
441 except AttributeError:
435 except AttributeError:
442 return self._headrevs()
436 return self._headrevs()
443
437
444 if self.filteredrevs:
438 if self.filteredrevs:
445 revs = self._checknofilteredinrevs(revs)
439 revs = self._checknofilteredinrevs(revs)
446 return super(changelog, self).headrevs(revs)
440 return super(changelog, self).headrevs(revs)
447
441
    def strip(self, *args, **kwargs):
        """delegate to revlog.strip; only valid on an unfiltered changelog"""
        # XXX make something better than assert
        # We can't expect proper strip behavior if we are filtered.
        assert not self.filteredrevs
        super(changelog, self).strip(*args, **kwargs)
453
447
454 def rev(self, node):
448 def rev(self, node):
455 """filtered version of revlog.rev"""
449 """filtered version of revlog.rev"""
456 r = super(changelog, self).rev(node)
450 r = super(changelog, self).rev(node)
457 if r in self.filteredrevs:
451 if r in self.filteredrevs:
458 raise error.FilteredLookupError(
452 raise error.FilteredLookupError(
459 hex(node), self.indexfile, _(b'filtered node')
453 hex(node), self.indexfile, _(b'filtered node')
460 )
454 )
461 return r
455 return r
462
456
463 def node(self, rev):
457 def node(self, rev):
464 """filtered version of revlog.node"""
458 """filtered version of revlog.node"""
465 if rev in self.filteredrevs:
459 if rev in self.filteredrevs:
466 raise error.FilteredIndexError(rev)
460 raise error.FilteredIndexError(rev)
467 return super(changelog, self).node(rev)
461 return super(changelog, self).node(rev)
468
462
469 def linkrev(self, rev):
463 def linkrev(self, rev):
470 """filtered version of revlog.linkrev"""
464 """filtered version of revlog.linkrev"""
471 if rev in self.filteredrevs:
465 if rev in self.filteredrevs:
472 raise error.FilteredIndexError(rev)
466 raise error.FilteredIndexError(rev)
473 return super(changelog, self).linkrev(rev)
467 return super(changelog, self).linkrev(rev)
474
468
475 def parentrevs(self, rev):
469 def parentrevs(self, rev):
476 """filtered version of revlog.parentrevs"""
470 """filtered version of revlog.parentrevs"""
477 if rev in self.filteredrevs:
471 if rev in self.filteredrevs:
478 raise error.FilteredIndexError(rev)
472 raise error.FilteredIndexError(rev)
479 return super(changelog, self).parentrevs(rev)
473 return super(changelog, self).parentrevs(rev)
480
474
481 def flags(self, rev):
475 def flags(self, rev):
482 """filtered version of revlog.flags"""
476 """filtered version of revlog.flags"""
483 if rev in self.filteredrevs:
477 if rev in self.filteredrevs:
484 raise error.FilteredIndexError(rev)
478 raise error.FilteredIndexError(rev)
485 return super(changelog, self).flags(rev)
479 return super(changelog, self).flags(rev)
486
480
    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: write new entries into a diverted
                # '00changelog.i.a' file, renamed into place at finalize
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    # discard a stale pending file from a previous run
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer new writes in memory and
                # append them to the index at finalize
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        # hooks run against pending data via _writepending; _finalize makes
        # the updates permanent when the transaction closes
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
504
498
    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            # divert mode writes the whole index to the '.a' file, so the
            # in-memory buffer must be empty
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            # NOTE(review): the open/close pair presumably verifies the
            # pending file exists before renaming — confirm
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # delay mode: append the buffered chunks to the real index
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
524
518
    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        # Returns True when a pending ('.a') file holds the data, so callers
        # know readers must be pointed at it.
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False
548
542
    def _enforceinlinesize(self, tr, fp=None):
        # While delayed, the inline index must not be split on disk;
        # _finalize re-invokes this once writes are back in place.
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)
552
546
553 def read(self, node):
547 def read(self, node):
554 """Obtain data from a parsed changelog revision.
548 """Obtain data from a parsed changelog revision.
555
549
556 Returns a 6-tuple of:
550 Returns a 6-tuple of:
557
551
558 - manifest node in binary
552 - manifest node in binary
559 - author/user as a localstr
553 - author/user as a localstr
560 - date as a 2-tuple of (time, timezone)
554 - date as a 2-tuple of (time, timezone)
561 - list of files
555 - list of files
562 - commit message as a localstr
556 - commit message as a localstr
563 - dict of extra metadata
557 - dict of extra metadata
564
558
565 Unless you need to access all fields, consider calling
559 Unless you need to access all fields, consider calling
566 ``changelogrevision`` instead, as it is faster for partial object
560 ``changelogrevision`` instead, as it is faster for partial object
567 access.
561 access.
568 """
562 """
569 d, s = self._revisiondata(node)
563 d, s = self._revisiondata(node)
570 c = changelogrevision(
564 c = changelogrevision(
571 d, s, self._copiesstorage == b'changeset-sidedata'
565 d, s, self._copiesstorage == b'changeset-sidedata'
572 )
566 )
573 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
567 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
574
568
575 def changelogrevision(self, nodeorrev):
569 def changelogrevision(self, nodeorrev):
576 """Obtain a ``changelogrevision`` for a node or revision."""
570 """Obtain a ``changelogrevision`` for a node or revision."""
577 text, sidedata = self._revisiondata(nodeorrev)
571 text, sidedata = self._revisiondata(nodeorrev)
578 return changelogrevision(
572 return changelogrevision(
579 text, sidedata, self._copiesstorage == b'changeset-sidedata'
573 text, sidedata, self._copiesstorage == b'changeset-sidedata'
580 )
574 )
581
575
582 def readfiles(self, node):
576 def readfiles(self, node):
583 """
577 """
584 short version of read that only returns the files modified by the cset
578 short version of read that only returns the files modified by the cset
585 """
579 """
586 text = self.revision(node)
580 text = self.revision(node)
587 if not text:
581 if not text:
588 return []
582 return []
589 last = text.index(b"\n\n")
583 last = text.index(b"\n\n")
590 l = text[:last].split(b'\n')
584 l = text[:last].split(b'\n')
591 return l[3:]
585 return l[3:]
592
586
593 def add(
587 def add(
594 self,
588 self,
595 manifest,
589 manifest,
596 files,
590 files,
597 desc,
591 desc,
598 transaction,
592 transaction,
599 p1,
593 p1,
600 p2,
594 p2,
601 user,
595 user,
602 date=None,
596 date=None,
603 extra=None,
597 extra=None,
604 p1copies=None,
598 p1copies=None,
605 p2copies=None,
599 p2copies=None,
606 filesadded=None,
600 filesadded=None,
607 filesremoved=None,
601 filesremoved=None,
608 ):
602 ):
609 # Convert to UTF-8 encoded bytestrings as the very first
603 # Convert to UTF-8 encoded bytestrings as the very first
610 # thing: calling any method on a localstr object will turn it
604 # thing: calling any method on a localstr object will turn it
611 # into a str object and the cached UTF-8 string is thus lost.
605 # into a str object and the cached UTF-8 string is thus lost.
612 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
606 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
613
607
614 user = user.strip()
608 user = user.strip()
615 # An empty username or a username with a "\n" will make the
609 # An empty username or a username with a "\n" will make the
616 # revision text contain two "\n\n" sequences -> corrupt
610 # revision text contain two "\n\n" sequences -> corrupt
617 # repository since read cannot unpack the revision.
611 # repository since read cannot unpack the revision.
618 if not user:
612 if not user:
619 raise error.StorageError(_(b"empty username"))
613 raise error.StorageError(_(b"empty username"))
620 if b"\n" in user:
614 if b"\n" in user:
621 raise error.StorageError(
615 raise error.StorageError(
622 _(b"username %r contains a newline") % pycompat.bytestr(user)
616 _(b"username %r contains a newline") % pycompat.bytestr(user)
623 )
617 )
624
618
625 desc = stripdesc(desc)
619 desc = stripdesc(desc)
626
620
627 if date:
621 if date:
628 parseddate = b"%d %d" % dateutil.parsedate(date)
622 parseddate = b"%d %d" % dateutil.parsedate(date)
629 else:
623 else:
630 parseddate = b"%d %d" % dateutil.makedate()
624 parseddate = b"%d %d" % dateutil.makedate()
631 if extra:
625 if extra:
632 branch = extra.get(b"branch")
626 branch = extra.get(b"branch")
633 if branch in (b"default", b""):
627 if branch in (b"default", b""):
634 del extra[b"branch"]
628 del extra[b"branch"]
635 elif branch in (b".", b"null", b"tip"):
629 elif branch in (b".", b"null", b"tip"):
636 raise error.StorageError(
630 raise error.StorageError(
637 _(b'the name \'%s\' is reserved') % branch
631 _(b'the name \'%s\' is reserved') % branch
638 )
632 )
639 sortedfiles = sorted(files)
633 sortedfiles = sorted(files)
640 sidedata = None
634 sidedata = None
641 if extra is not None:
635 if extra is not None:
642 for name in (
636 for name in (
643 b'p1copies',
637 b'p1copies',
644 b'p2copies',
638 b'p2copies',
645 b'filesadded',
639 b'filesadded',
646 b'filesremoved',
640 b'filesremoved',
647 ):
641 ):
648 extra.pop(name, None)
642 extra.pop(name, None)
649 if p1copies is not None:
643 if p1copies is not None:
650 p1copies = copies.encodecopies(sortedfiles, p1copies)
644 p1copies = copies.encodecopies(sortedfiles, p1copies)
651 if p2copies is not None:
645 if p2copies is not None:
652 p2copies = copies.encodecopies(sortedfiles, p2copies)
646 p2copies = copies.encodecopies(sortedfiles, p2copies)
653 if filesadded is not None:
647 if filesadded is not None:
654 filesadded = copies.encodefileindices(sortedfiles, filesadded)
648 filesadded = copies.encodefileindices(sortedfiles, filesadded)
655 if filesremoved is not None:
649 if filesremoved is not None:
656 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
650 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
657 if self._copiesstorage == b'extra':
651 if self._copiesstorage == b'extra':
658 extrasentries = p1copies, p2copies, filesadded, filesremoved
652 extrasentries = p1copies, p2copies, filesadded, filesremoved
659 if extra is None and any(x is not None for x in extrasentries):
653 if extra is None and any(x is not None for x in extrasentries):
660 extra = {}
654 extra = {}
661 if p1copies is not None:
655 if p1copies is not None:
662 extra[b'p1copies'] = p1copies
656 extra[b'p1copies'] = p1copies
663 if p2copies is not None:
657 if p2copies is not None:
664 extra[b'p2copies'] = p2copies
658 extra[b'p2copies'] = p2copies
665 if filesadded is not None:
659 if filesadded is not None:
666 extra[b'filesadded'] = filesadded
660 extra[b'filesadded'] = filesadded
667 if filesremoved is not None:
661 if filesremoved is not None:
668 extra[b'filesremoved'] = filesremoved
662 extra[b'filesremoved'] = filesremoved
669 elif self._copiesstorage == b'changeset-sidedata':
663 elif self._copiesstorage == b'changeset-sidedata':
670 sidedata = {}
664 sidedata = {}
671 if p1copies:
665 if p1copies:
672 sidedata[sidedatamod.SD_P1COPIES] = p1copies
666 sidedata[sidedatamod.SD_P1COPIES] = p1copies
673 if p2copies:
667 if p2copies:
674 sidedata[sidedatamod.SD_P2COPIES] = p2copies
668 sidedata[sidedatamod.SD_P2COPIES] = p2copies
675 if filesadded:
669 if filesadded:
676 sidedata[sidedatamod.SD_FILESADDED] = filesadded
670 sidedata[sidedatamod.SD_FILESADDED] = filesadded
677 if filesremoved:
671 if filesremoved:
678 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
672 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
679 if not sidedata:
673 if not sidedata:
680 sidedata = None
674 sidedata = None
681
675
682 if extra:
676 if extra:
683 extra = encodeextra(extra)
677 extra = encodeextra(extra)
684 parseddate = b"%s %s" % (parseddate, extra)
678 parseddate = b"%s %s" % (parseddate, extra)
685 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
679 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
686 text = b"\n".join(l)
680 text = b"\n".join(l)
687 return self.addrevision(
681 return self.addrevision(
688 text, transaction, len(self), p1, p2, sidedata=sidedata
682 text, transaction, len(self), p1, p2, sidedata=sidedata
689 )
683 )
690
684
691 def branchinfo(self, rev):
685 def branchinfo(self, rev):
692 """return the branch name and open/close state of a revision
686 """return the branch name and open/close state of a revision
693
687
694 This function exists because creating a changectx object
688 This function exists because creating a changectx object
695 just to access this is costly."""
689 just to access this is costly."""
696 extra = self.read(rev)[5]
690 extra = self.read(rev)[5]
697 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
691 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
698
692
699 def _nodeduplicatecallback(self, transaction, node):
693 def _nodeduplicatecallback(self, transaction, node):
700 # keep track of revisions that got "re-added", eg: unbunde of know rev.
694 # keep track of revisions that got "re-added", eg: unbunde of know rev.
701 #
695 #
702 # We track them in a list to preserve their order from the source bundle
696 # We track them in a list to preserve their order from the source bundle
703 duplicates = transaction.changes.setdefault(b'revduplicates', [])
697 duplicates = transaction.changes.setdefault(b'revduplicates', [])
704 duplicates.append(self.rev(node))
698 duplicates.append(self.rev(node))
@@ -1,370 +1,376 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .node import nullrev
15 from .pycompat import (
15 from .pycompat import (
16 delattr,
16 delattr,
17 getattr,
17 getattr,
18 setattr,
18 setattr,
19 )
19 )
20 from . import (
20 from . import (
21 obsolete,
21 obsolete,
22 phases,
22 phases,
23 pycompat,
23 pycompat,
24 revlog,
24 revlog,
25 tags as tagsmod,
25 tags as tagsmod,
26 util,
26 util,
27 )
27 )
28 from .utils import repoviewutil
28 from .utils import repoviewutil
29
29
30
30
31 def hideablerevs(repo):
31 def hideablerevs(repo):
32 """Revision candidates to be hidden
32 """Revision candidates to be hidden
33
33
34 This is a standalone function to allow extensions to wrap it.
34 This is a standalone function to allow extensions to wrap it.
35
35
36 Because we use the set of immutable changesets as a fallback subset in
36 Because we use the set of immutable changesets as a fallback subset in
37 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
37 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
38 "public" changesets as "hideable". Doing so would break multiple code
38 "public" changesets as "hideable". Doing so would break multiple code
39 assertions and lead to crashes."""
39 assertions and lead to crashes."""
40 obsoletes = obsolete.getrevs(repo, b'obsolete')
40 obsoletes = obsolete.getrevs(repo, b'obsolete')
41 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
41 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
42 internals = frozenset(internals)
42 internals = frozenset(internals)
43 return obsoletes | internals
43 return obsoletes | internals
44
44
45
45
46 def pinnedrevs(repo):
46 def pinnedrevs(repo):
47 """revisions blocking hidden changesets from being filtered
47 """revisions blocking hidden changesets from being filtered
48 """
48 """
49
49
50 cl = repo.changelog
50 cl = repo.changelog
51 pinned = set()
51 pinned = set()
52 pinned.update([par.rev() for par in repo[None].parents()])
52 pinned.update([par.rev() for par in repo[None].parents()])
53 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
53 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
54
54
55 tags = {}
55 tags = {}
56 tagsmod.readlocaltags(repo.ui, repo, tags, {})
56 tagsmod.readlocaltags(repo.ui, repo, tags, {})
57 if tags:
57 if tags:
58 rev, nodemap = cl.rev, cl.nodemap
58 rev, nodemap = cl.rev, cl.nodemap
59 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
59 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
60 return pinned
60 return pinned
61
61
62
62
63 def _revealancestors(pfunc, hidden, revs):
63 def _revealancestors(pfunc, hidden, revs):
64 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
64 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
65 from 'hidden'
65 from 'hidden'
66
66
67 - pfunc(r): a funtion returning parent of 'r',
67 - pfunc(r): a funtion returning parent of 'r',
68 - hidden: the (preliminary) hidden revisions, to be updated
68 - hidden: the (preliminary) hidden revisions, to be updated
69 - revs: iterable of revnum,
69 - revs: iterable of revnum,
70
70
71 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
71 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
72 *not* revealed)
72 *not* revealed)
73 """
73 """
74 stack = list(revs)
74 stack = list(revs)
75 while stack:
75 while stack:
76 for p in pfunc(stack.pop()):
76 for p in pfunc(stack.pop()):
77 if p != nullrev and p in hidden:
77 if p != nullrev and p in hidden:
78 hidden.remove(p)
78 hidden.remove(p)
79 stack.append(p)
79 stack.append(p)
80
80
81
81
82 def computehidden(repo, visibilityexceptions=None):
82 def computehidden(repo, visibilityexceptions=None):
83 """compute the set of hidden revision to filter
83 """compute the set of hidden revision to filter
84
84
85 During most operation hidden should be filtered."""
85 During most operation hidden should be filtered."""
86 assert not repo.changelog.filteredrevs
86 assert not repo.changelog.filteredrevs
87
87
88 hidden = hideablerevs(repo)
88 hidden = hideablerevs(repo)
89 if hidden:
89 if hidden:
90 hidden = set(hidden - pinnedrevs(repo))
90 hidden = set(hidden - pinnedrevs(repo))
91 if visibilityexceptions:
91 if visibilityexceptions:
92 hidden -= visibilityexceptions
92 hidden -= visibilityexceptions
93 pfunc = repo.changelog.parentrevs
93 pfunc = repo.changelog.parentrevs
94 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
94 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
95
95
96 visible = mutable - hidden
96 visible = mutable - hidden
97 _revealancestors(pfunc, hidden, visible)
97 _revealancestors(pfunc, hidden, visible)
98 return frozenset(hidden)
98 return frozenset(hidden)
99
99
100
100
101 def computesecret(repo, visibilityexceptions=None):
101 def computesecret(repo, visibilityexceptions=None):
102 """compute the set of revision that can never be exposed through hgweb
102 """compute the set of revision that can never be exposed through hgweb
103
103
104 Changeset in the secret phase (or above) should stay unaccessible."""
104 Changeset in the secret phase (or above) should stay unaccessible."""
105 assert not repo.changelog.filteredrevs
105 assert not repo.changelog.filteredrevs
106 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
106 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
107 return frozenset(secrets)
107 return frozenset(secrets)
108
108
109
109
110 def computeunserved(repo, visibilityexceptions=None):
110 def computeunserved(repo, visibilityexceptions=None):
111 """compute the set of revision that should be filtered when used a server
111 """compute the set of revision that should be filtered when used a server
112
112
113 Secret and hidden changeset should not pretend to be here."""
113 Secret and hidden changeset should not pretend to be here."""
114 assert not repo.changelog.filteredrevs
114 assert not repo.changelog.filteredrevs
115 # fast path in simple case to avoid impact of non optimised code
115 # fast path in simple case to avoid impact of non optimised code
116 hiddens = filterrevs(repo, b'visible')
116 hiddens = filterrevs(repo, b'visible')
117 secrets = filterrevs(repo, b'served.hidden')
117 secrets = filterrevs(repo, b'served.hidden')
118 if secrets:
118 if secrets:
119 return frozenset(hiddens | secrets)
119 return frozenset(hiddens | secrets)
120 else:
120 else:
121 return hiddens
121 return hiddens
122
122
123
123
124 def computemutable(repo, visibilityexceptions=None):
124 def computemutable(repo, visibilityexceptions=None):
125 assert not repo.changelog.filteredrevs
125 assert not repo.changelog.filteredrevs
126 # fast check to avoid revset call on huge repo
126 # fast check to avoid revset call on huge repo
127 if any(repo._phasecache.phaseroots[1:]):
127 if any(repo._phasecache.phaseroots[1:]):
128 getphase = repo._phasecache.phase
128 getphase = repo._phasecache.phase
129 maymutable = filterrevs(repo, b'base')
129 maymutable = filterrevs(repo, b'base')
130 return frozenset(r for r in maymutable if getphase(repo, r))
130 return frozenset(r for r in maymutable if getphase(repo, r))
131 return frozenset()
131 return frozenset()
132
132
133
133
134 def computeimpactable(repo, visibilityexceptions=None):
134 def computeimpactable(repo, visibilityexceptions=None):
135 """Everything impactable by mutable revision
135 """Everything impactable by mutable revision
136
136
137 The immutable filter still have some chance to get invalidated. This will
137 The immutable filter still have some chance to get invalidated. This will
138 happen when:
138 happen when:
139
139
140 - you garbage collect hidden changeset,
140 - you garbage collect hidden changeset,
141 - public phase is moved backward,
141 - public phase is moved backward,
142 - something is changed in the filtering (this could be fixed)
142 - something is changed in the filtering (this could be fixed)
143
143
144 This filter out any mutable changeset and any public changeset that may be
144 This filter out any mutable changeset and any public changeset that may be
145 impacted by something happening to a mutable revision.
145 impacted by something happening to a mutable revision.
146
146
147 This is achieved by filtered everything with a revision number egal or
147 This is achieved by filtered everything with a revision number egal or
148 higher than the first mutable changeset is filtered."""
148 higher than the first mutable changeset is filtered."""
149 assert not repo.changelog.filteredrevs
149 assert not repo.changelog.filteredrevs
150 cl = repo.changelog
150 cl = repo.changelog
151 firstmutable = len(cl)
151 firstmutable = len(cl)
152 for roots in repo._phasecache.phaseroots[1:]:
152 for roots in repo._phasecache.phaseroots[1:]:
153 if roots:
153 if roots:
154 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
154 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
155 # protect from nullrev root
155 # protect from nullrev root
156 firstmutable = max(0, firstmutable)
156 firstmutable = max(0, firstmutable)
157 return frozenset(pycompat.xrange(firstmutable, len(cl)))
157 return frozenset(pycompat.xrange(firstmutable, len(cl)))
158
158
159
159
160 # function to compute filtered set
160 # function to compute filtered set
161 #
161 #
162 # When adding a new filter you MUST update the table at:
162 # When adding a new filter you MUST update the table at:
163 # mercurial.utils.repoviewutil.subsettable
163 # mercurial.utils.repoviewutil.subsettable
164 # Otherwise your filter will have to recompute all its branches cache
164 # Otherwise your filter will have to recompute all its branches cache
165 # from scratch (very slow).
165 # from scratch (very slow).
166 filtertable = {
166 filtertable = {
167 b'visible': computehidden,
167 b'visible': computehidden,
168 b'visible-hidden': computehidden,
168 b'visible-hidden': computehidden,
169 b'served.hidden': computesecret,
169 b'served.hidden': computesecret,
170 b'served': computeunserved,
170 b'served': computeunserved,
171 b'immutable': computemutable,
171 b'immutable': computemutable,
172 b'base': computeimpactable,
172 b'base': computeimpactable,
173 }
173 }
174
174
175 _basefiltername = list(filtertable)
175 _basefiltername = list(filtertable)
176
176
177
177
178 def extrafilter(ui):
178 def extrafilter(ui):
179 """initialize extra filter and return its id
179 """initialize extra filter and return its id
180
180
181 If extra filtering is configured, we make sure the associated filtered view
181 If extra filtering is configured, we make sure the associated filtered view
182 are declared and return the associated id.
182 are declared and return the associated id.
183 """
183 """
184 frevs = ui.config(b'experimental', b'extra-filter-revs')
184 frevs = ui.config(b'experimental', b'extra-filter-revs')
185 if frevs is None:
185 if frevs is None:
186 return None
186 return None
187
187
188 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
188 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
189
189
190 combine = lambda fname: fname + b'%' + fid
190 combine = lambda fname: fname + b'%' + fid
191
191
192 subsettable = repoviewutil.subsettable
192 subsettable = repoviewutil.subsettable
193
193
194 if combine(b'base') not in filtertable:
194 if combine(b'base') not in filtertable:
195 for name in _basefiltername:
195 for name in _basefiltername:
196
196
197 def extrafilteredrevs(repo, *args, **kwargs):
197 def extrafilteredrevs(repo, *args, **kwargs):
198 baserevs = filtertable[name](repo, *args, **kwargs)
198 baserevs = filtertable[name](repo, *args, **kwargs)
199 extrarevs = frozenset(repo.revs(frevs))
199 extrarevs = frozenset(repo.revs(frevs))
200 return baserevs | extrarevs
200 return baserevs | extrarevs
201
201
202 filtertable[combine(name)] = extrafilteredrevs
202 filtertable[combine(name)] = extrafilteredrevs
203 if name in subsettable:
203 if name in subsettable:
204 subsettable[combine(name)] = combine(subsettable[name])
204 subsettable[combine(name)] = combine(subsettable[name])
205 return fid
205 return fid
206
206
207
207
208 def filterrevs(repo, filtername, visibilityexceptions=None):
208 def filterrevs(repo, filtername, visibilityexceptions=None):
209 """returns set of filtered revision for this filter name
209 """returns set of filtered revision for this filter name
210
210
211 visibilityexceptions is a set of revs which must are exceptions for
211 visibilityexceptions is a set of revs which must are exceptions for
212 hidden-state and must be visible. They are dynamic and hence we should not
212 hidden-state and must be visible. They are dynamic and hence we should not
213 cache it's result"""
213 cache it's result"""
214 if filtername not in repo.filteredrevcache:
214 if filtername not in repo.filteredrevcache:
215 func = filtertable[filtername]
215 func = filtertable[filtername]
216 if visibilityexceptions:
216 if visibilityexceptions:
217 return func(repo.unfiltered, visibilityexceptions)
217 return func(repo.unfiltered, visibilityexceptions)
218 repo.filteredrevcache[filtername] = func(repo.unfiltered())
218 repo.filteredrevcache[filtername] = func(repo.unfiltered())
219 return repo.filteredrevcache[filtername]
219 return repo.filteredrevcache[filtername]
220
220
221
221
222 def wrapchangelog(unfichangelog, filteredrevs):
222 def wrapchangelog(unfichangelog, filteredrevs):
223 cl = copy.copy(unfichangelog)
223 cl = copy.copy(unfichangelog)
224 cl.filteredrevs = filteredrevs
224 cl.filteredrevs = filteredrevs
225
225
226 class filteredchangelog(cl.__class__):
226 class filteredchangelog(cl.__class__):
227 def tiprev(self):
227 def tiprev(self):
228 """filtered version of revlog.tiprev"""
228 """filtered version of revlog.tiprev"""
229 for i in pycompat.xrange(len(self) - 1, -2, -1):
229 for i in pycompat.xrange(len(self) - 1, -2, -1):
230 if i not in self.filteredrevs:
230 if i not in self.filteredrevs:
231 return i
231 return i
232
232
233 def __contains__(self, rev):
233 def __contains__(self, rev):
234 """filtered version of revlog.__contains__"""
234 """filtered version of revlog.__contains__"""
235 return 0 <= rev < len(self) and rev not in self.filteredrevs
235 return 0 <= rev < len(self) and rev not in self.filteredrevs
236
236
237 def __iter__(self):
237 def __iter__(self):
238 """filtered version of revlog.__iter__"""
238 """filtered version of revlog.__iter__"""
239 if len(self.filteredrevs) == 0:
239 if len(self.filteredrevs) == 0:
240 return revlog.revlog.__iter__(self)
240 return revlog.revlog.__iter__(self)
241
241
242
242
243 def filterediter():
243 def filterediter():
244 for i in pycompat.xrange(len(self)):
244 for i in pycompat.xrange(len(self)):
245 if i not in self.filteredrevs:
245 if i not in self.filteredrevs:
246 yield i
246 yield i
247
247
248 return filterediter()
248 return filterediter()
249
249
250 def revs(self, start=0, stop=None):
251 """filtered version of revlog.revs"""
252 for i in super(filteredchangelog, self).revs(start, stop):
253 if i not in self.filteredrevs:
254 yield i
255
250 cl.__class__ = filteredchangelog
256 cl.__class__ = filteredchangelog
251
257
252 return cl
258 return cl
253
259
254
260
255 class repoview(object):
261 class repoview(object):
256 """Provide a read/write view of a repo through a filtered changelog
262 """Provide a read/write view of a repo through a filtered changelog
257
263
258 This object is used to access a filtered version of a repository without
264 This object is used to access a filtered version of a repository without
259 altering the original repository object itself. We can not alter the
265 altering the original repository object itself. We can not alter the
260 original object for two main reasons:
266 original object for two main reasons:
261 - It prevents the use of a repo with multiple filters at the same time. In
267 - It prevents the use of a repo with multiple filters at the same time. In
262 particular when multiple threads are involved.
268 particular when multiple threads are involved.
263 - It makes scope of the filtering harder to control.
269 - It makes scope of the filtering harder to control.
264
270
265 This object behaves very closely to the original repository. All attribute
271 This object behaves very closely to the original repository. All attribute
266 operations are done on the original repository:
272 operations are done on the original repository:
267 - An access to `repoview.someattr` actually returns `repo.someattr`,
273 - An access to `repoview.someattr` actually returns `repo.someattr`,
268 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
274 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
269 - A deletion of `repoview.someattr` actually drops `someattr`
275 - A deletion of `repoview.someattr` actually drops `someattr`
270 from `repo.__dict__`.
276 from `repo.__dict__`.
271
277
272 The only exception is the `changelog` property. It is overridden to return
278 The only exception is the `changelog` property. It is overridden to return
273 a (surface) copy of `repo.changelog` with some revisions filtered. The
279 a (surface) copy of `repo.changelog` with some revisions filtered. The
274 `filtername` attribute of the view control the revisions that need to be
280 `filtername` attribute of the view control the revisions that need to be
275 filtered. (the fact the changelog is copied is an implementation detail).
281 filtered. (the fact the changelog is copied is an implementation detail).
276
282
277 Unlike attributes, this object intercepts all method calls. This means that
283 Unlike attributes, this object intercepts all method calls. This means that
278 all methods are run on the `repoview` object with the filtered `changelog`
284 all methods are run on the `repoview` object with the filtered `changelog`
279 property. For this purpose the simple `repoview` class must be mixed with
285 property. For this purpose the simple `repoview` class must be mixed with
280 the actual class of the repository. This ensures that the resulting
286 the actual class of the repository. This ensures that the resulting
281 `repoview` object have the very same methods than the repo object. This
287 `repoview` object have the very same methods than the repo object. This
282 leads to the property below.
288 leads to the property below.
283
289
284 repoview.method() --> repo.__class__.method(repoview)
290 repoview.method() --> repo.__class__.method(repoview)
285
291
286 The inheritance has to be done dynamically because `repo` can be of any
292 The inheritance has to be done dynamically because `repo` can be of any
287 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
293 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
288 """
294 """
289
295
290 def __init__(self, repo, filtername, visibilityexceptions=None):
296 def __init__(self, repo, filtername, visibilityexceptions=None):
291 object.__setattr__(self, r'_unfilteredrepo', repo)
297 object.__setattr__(self, r'_unfilteredrepo', repo)
292 object.__setattr__(self, r'filtername', filtername)
298 object.__setattr__(self, r'filtername', filtername)
293 object.__setattr__(self, r'_clcachekey', None)
299 object.__setattr__(self, r'_clcachekey', None)
294 object.__setattr__(self, r'_clcache', None)
300 object.__setattr__(self, r'_clcache', None)
295 # revs which are exceptions and must not be hidden
301 # revs which are exceptions and must not be hidden
296 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
302 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
297
303
298 # not a propertycache on purpose we shall implement a proper cache later
304 # not a propertycache on purpose we shall implement a proper cache later
299 @property
305 @property
300 def changelog(self):
306 def changelog(self):
301 """return a filtered version of the changeset
307 """return a filtered version of the changeset
302
308
303 this changelog must not be used for writing"""
309 this changelog must not be used for writing"""
304 # some cache may be implemented later
310 # some cache may be implemented later
305 unfi = self._unfilteredrepo
311 unfi = self._unfilteredrepo
306 unfichangelog = unfi.changelog
312 unfichangelog = unfi.changelog
307 # bypass call to changelog.method
313 # bypass call to changelog.method
308 unfiindex = unfichangelog.index
314 unfiindex = unfichangelog.index
309 unfilen = len(unfiindex)
315 unfilen = len(unfiindex)
310 unfinode = unfiindex[unfilen - 1][7]
316 unfinode = unfiindex[unfilen - 1][7]
311 with util.timedcm('repo filter for %s', self.filtername):
317 with util.timedcm('repo filter for %s', self.filtername):
312 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
318 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
313 cl = self._clcache
319 cl = self._clcache
314 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
320 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
315 # if cl.index is not unfiindex, unfi.changelog would be
321 # if cl.index is not unfiindex, unfi.changelog would be
316 # recreated, and our clcache refers to garbage object
322 # recreated, and our clcache refers to garbage object
317 if cl is not None and (
323 if cl is not None and (
318 cl.index is not unfiindex or newkey != self._clcachekey
324 cl.index is not unfiindex or newkey != self._clcachekey
319 ):
325 ):
320 cl = None
326 cl = None
321 # could have been made None by the previous if
327 # could have been made None by the previous if
322 if cl is None:
328 if cl is None:
323 cl = wrapchangelog(unfichangelog, revs)
329 cl = wrapchangelog(unfichangelog, revs)
324 object.__setattr__(self, r'_clcache', cl)
330 object.__setattr__(self, r'_clcache', cl)
325 object.__setattr__(self, r'_clcachekey', newkey)
331 object.__setattr__(self, r'_clcachekey', newkey)
326 return cl
332 return cl
327
333
328 def unfiltered(self):
334 def unfiltered(self):
329 """Return an unfiltered version of a repo"""
335 """Return an unfiltered version of a repo"""
330 return self._unfilteredrepo
336 return self._unfilteredrepo
331
337
def filtered(self, name, visibilityexceptions=None):
    """Return a filtered version of a repository.

    If the requested filter ``name`` matches the one already applied and
    there are no extra visibility exceptions, the current view is reused.
    Otherwise the request is delegated to the unfiltered repository so a
    fresh view is built on top of the real repo (never on top of another
    view).
    """
    if name == self.filtername and not visibilityexceptions:
        # Already viewing through exactly this filter; avoid re-wrapping.
        return self
    return self.unfiltered().filtered(name, visibilityexceptions)
337
343
def __repr__(self):
    """Render as ``<filteredrepo:FILTERNAME <unfiltered repr>>``.

    The filter name is converted from bytes to a native str via
    ``pycompat.sysstr`` so the repr is readable on both Python 2 and 3.
    """
    return r'<%s:%s %r>' % (
        self.__class__.__name__,
        pycompat.sysstr(self.filtername),
        self.unfiltered(),
    )
344
350
# every attribute access is forwarded to the proxied (unfiltered) repo
def __getattr__(self, attr):
    """Delegate unknown attribute reads to the wrapped repository.

    Only called when normal lookup on the view fails, so attributes
    defined on the proxy itself (e.g. ``changelog``) are not forwarded.
    """
    return getattr(self._unfilteredrepo, attr)
348
354
def __setattr__(self, attr, value):
    """Delegate attribute writes to the wrapped repository.

    The view itself stays stateless; all mutations land on the real
    repo so every view sees them. (Proxy-private state is installed via
    ``object.__setattr__`` elsewhere to bypass this forwarding.)
    """
    return setattr(self._unfilteredrepo, attr, value)
351
357
def __delattr__(self, attr):
    """Delegate attribute deletion to the wrapped repository."""
    return delattr(self._unfilteredrepo, attr)
354
360
355
361
# Python <3.4 easily leaks types via __mro__. See
# https://bugs.python.org/issue17950. We cache dynamically created types
# so they won't be leaked on every invocation of repo.filtered().
# Keyed weakly by the base repo class so unused entries can be collected.
_filteredrepotypes = weakref.WeakKeyDictionary()
360
366
361
367
def newtype(base):
    """Create a new type with the repoview mixin and the given base class.

    The resulting class is memoized in ``_filteredrepotypes`` (a weak-key
    cache keyed on ``base``) so repeated ``repo.filtered()`` calls reuse
    one dynamically created type instead of leaking a new one each time.
    """
    if base not in _filteredrepotypes:

        class filteredrepo(repoview, base):
            pass

        _filteredrepotypes[base] = filteredrepo
    return _filteredrepotypes[base]
General Comments 0
You need to be logged in to leave comments. Login now