##// END OF EJS Templates
repoview: move changelog.__iter__() override to filteredchangelog...
Martin von Zweigbergk -
r43750:c093fb81 default
parent child Browse files
Show More
@@ -1,716 +1,704 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 util,
24 util,
25 )
25 )
26 from .utils import (
26 from .utils import (
27 dateutil,
27 dateutil,
28 stringutil,
28 stringutil,
29 )
29 )
30
30
31 from .revlogutils import sidedata as sidedatamod
31 from .revlogutils import sidedata as sidedatamod
32
32
# Baseline 'extra' metadata for a changeset. Extensions might modify this
# mapping, so code that needs a clean starting point copies it.
_defaultextra = {b'branch': b'default'}
36 def _string_escape(text):
36 def _string_escape(text):
37 """
37 """
38 >>> from .pycompat import bytechr as chr
38 >>> from .pycompat import bytechr as chr
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 >>> s
41 >>> s
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 >>> res = _string_escape(s)
43 >>> res = _string_escape(s)
44 >>> s == _string_unescape(res)
44 >>> s == _string_unescape(res)
45 True
45 True
46 """
46 """
47 # subset of the string_escape codec
47 # subset of the string_escape codec
48 text = (
48 text = (
49 text.replace(b'\\', b'\\\\')
49 text.replace(b'\\', b'\\\\')
50 .replace(b'\n', b'\\n')
50 .replace(b'\n', b'\\n')
51 .replace(b'\r', b'\\r')
51 .replace(b'\r', b'\\r')
52 )
52 )
53 return text.replace(b'\0', b'\\0')
53 return text.replace(b'\0', b'\\0')
54
54
55
55
def _string_unescape(text):
    """Reverse the transformation applied by _string_escape()."""
    if b'\\0' in text:
        # Fix up b'\\0' (escaped NUL) without being fooled by an escaped
        # backslash followed by a literal '0': tag every escaped backslash
        # with a newline, substitute the NULs, then drop the tags.
        marked = text.replace(b'\\\\', b'\\\\\n')
        substituted = marked.replace(b'\\0', b'\0')
        text = substituted.replace(b'\n', b'')
    return stringutil.unescapestr(text)
63
63
64
64
def decodeextra(text):
    """Parse an encoded 'extra' blob into a dict.

    The input is a b'\\0'-separated sequence of escaped ``key:value``
    entries as produced by encodeextra().  The result starts from
    _defaultextra, so b'branch' always defaults to b'default'.
    """
    extra = _defaultextra.copy()
    for entry in text.split(b'\0'):
        if not entry:
            continue
        key, value = _string_unescape(entry).split(b':', 1)
        extra[key] = value
    return extra
82
82
83
83
def encodeextra(d):
    """Encode a dict of extra metadata into a single NUL-separated blob."""
    # keys must be sorted to produce a deterministic changelog entry
    encoded = (
        _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d)
    )
    return b"\0".join(encoded)
91
91
92
92
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = (line.rstrip() for line in desc.splitlines())
    return b'\n'.join(cleaned).strip(b'\n')
96
96
97
97
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # ``buf`` accumulates every write; nothing goes to ``fp`` here.
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # current virtual file position (spans real file + buffer)
        self.offset = fp.tell()
        # size of the on-disk part; anything past it lives in ``self.data``
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus all buffered writes
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are deliberately buffered; there is nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only positions inside the real file map onto the real fp
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # start in the on-disk portion
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # continue (or start) in the buffered portion: collapse the
            # buffer into a single chunk so it can be sliced by offset
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # appended data only ever lands in the buffer, never on disk
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
166
166
167
167
168 def _divertopener(opener, target):
168 def _divertopener(opener, target):
169 """build an opener that writes in 'target.a' instead of 'target'"""
169 """build an opener that writes in 'target.a' instead of 'target'"""
170
170
171 def _divert(name, mode=b'r', checkambig=False):
171 def _divert(name, mode=b'r', checkambig=False):
172 if name != target:
172 if name != target:
173 return opener(name, mode)
173 return opener(name, mode)
174 return opener(name + b".a", mode)
174 return opener(name + b".a", mode)
175
175
176 return _divert
176 return _divert
177
177
178
178
def _delayopener(opener, target, buf):
    """build an opener that stores chunks in 'buf' instead of 'target'"""

    def _delay(name, mode=b'r', checkambig=False):
        if name == target:
            # divert writes into the in-memory buffer
            return appender(opener, name, mode, buf)
        return opener(name, mode)

    return _delay
188
188
189
189
@attr.s
class _changelogrevision(object):
    """Plain data holder mirroring changelogrevision's properties.

    Used for the empty/missing revision (see changelogrevision.__new__),
    where every field takes its default value.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node the changeset points at (binary nodeid)
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    # copy/added/removed metadata; None means "not recorded"
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
204
205
205
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        # An empty/missing revision parses to all-default values.
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # offsets of the first three line breaks
        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # whether copy metadata lives in sidedata rather than in extra
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # first line of the text is the hex manifest node
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # [time, tz] fields of the third line
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # optional third field of the third line; None when absent
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        # nothing between the third newline and the double newline means
        # the changeset touched no files
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            # sidedata present but empty -> recorded as "no files added"
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        # None -> the information was never recorded for this revision
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # everything after the double newline is the commit message
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
362
363
363
364 class changelog(revlog.revlog):
364 class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state used by delayupdate()/_writepending()/_finalize() to keep
        # index writes invisible to other readers until a transaction ends
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from the filtered accessors of this changelog
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
407
407
408 def __iter__(self):
409 """filtered version of revlog.__iter__"""
410 if len(self.filteredrevs) == 0:
411 return revlog.revlog.__iter__(self)
412
413 def filterediter():
414 for i in pycompat.xrange(len(self)):
415 if i not in self.filteredrevs:
416 yield i
417
418 return filterediter()
419
420 def revs(self, start=0, stop=None):
408 def revs(self, start=0, stop=None):
421 """filtered version of revlog.revs"""
409 """filtered version of revlog.revs"""
422 for i in super(changelog, self).revs(start, stop):
410 for i in super(changelog, self).revs(start, stop):
423 if i not in self.filteredrevs:
411 if i not in self.filteredrevs:
424 yield i
412 yield i
425
413
    def _checknofilteredinrevs(self, revs):
        """raise the appropriate error if 'revs' contains a filtered revision

        This returns a version of 'revs' to be used thereafter by the caller.
        In particular, if revs is an iterator, it is converted into a set.
        """
        safehasattr = util.safehasattr
        if safehasattr(revs, '__next__'):
            # Note that inspect.isgenerator() is not true for iterators,
            # so probe for the iterator protocol instead; a one-shot
            # iterator must be materialized before we can intersect it.
            revs = set(revs)

        filteredrevs = self.filteredrevs
        if safehasattr(revs, 'first'):  # smartset
            # smartsets support set intersection via '&' directly
            offenders = revs & filteredrevs
        else:
            offenders = filteredrevs.intersection(revs)

        # any overlap at all is an error; report the first offender found
        for rev in offenders:
            raise error.FilteredIndexError(rev)
        return revs
446
434
    def headrevs(self, revs=None):
        # With no explicit revs, compute the heads of the filtered repo,
        # preferring the index's native filtered implementation.
        if revs is None and self.filteredrevs:
            try:
                return self.index.headrevsfiltered(self.filteredrevs)
            # AttributeError covers non-c-extension environments and
            # old c extensions without filter handling.
            except AttributeError:
                return self._headrevs()

        if self.filteredrevs:
            # explicit revs must not contain filtered revisions
            revs = self._checknofilteredinrevs(revs)
        return super(changelog, self).headrevs(revs)
459
447
    def strip(self, *args, **kwargs):
        # Stripping is only meaningful on the unfiltered changelog.
        # XXX make something better than assert
        # We can't expect proper strip behavior if we are filtered.
        assert not self.filteredrevs
        super(changelog, self).strip(*args, **kwargs)
465
453
466 def rev(self, node):
454 def rev(self, node):
467 """filtered version of revlog.rev"""
455 """filtered version of revlog.rev"""
468 r = super(changelog, self).rev(node)
456 r = super(changelog, self).rev(node)
469 if r in self.filteredrevs:
457 if r in self.filteredrevs:
470 raise error.FilteredLookupError(
458 raise error.FilteredLookupError(
471 hex(node), self.indexfile, _(b'filtered node')
459 hex(node), self.indexfile, _(b'filtered node')
472 )
460 )
473 return r
461 return r
474
462
475 def node(self, rev):
463 def node(self, rev):
476 """filtered version of revlog.node"""
464 """filtered version of revlog.node"""
477 if rev in self.filteredrevs:
465 if rev in self.filteredrevs:
478 raise error.FilteredIndexError(rev)
466 raise error.FilteredIndexError(rev)
479 return super(changelog, self).node(rev)
467 return super(changelog, self).node(rev)
480
468
481 def linkrev(self, rev):
469 def linkrev(self, rev):
482 """filtered version of revlog.linkrev"""
470 """filtered version of revlog.linkrev"""
483 if rev in self.filteredrevs:
471 if rev in self.filteredrevs:
484 raise error.FilteredIndexError(rev)
472 raise error.FilteredIndexError(rev)
485 return super(changelog, self).linkrev(rev)
473 return super(changelog, self).linkrev(rev)
486
474
487 def parentrevs(self, rev):
475 def parentrevs(self, rev):
488 """filtered version of revlog.parentrevs"""
476 """filtered version of revlog.parentrevs"""
489 if rev in self.filteredrevs:
477 if rev in self.filteredrevs:
490 raise error.FilteredIndexError(rev)
478 raise error.FilteredIndexError(rev)
491 return super(changelog, self).parentrevs(rev)
479 return super(changelog, self).parentrevs(rev)
492
480
493 def flags(self, rev):
481 def flags(self, rev):
494 """filtered version of revlog.flags"""
482 """filtered version of revlog.flags"""
495 if rev in self.filteredrevs:
483 if rev in self.filteredrevs:
496 raise error.FilteredIndexError(rev)
484 raise error.FilteredIndexError(rev)
497 return super(changelog, self).flags(rev)
485 return super(changelog, self).flags(rev)
498
486
499 def delayupdate(self, tr):
487 def delayupdate(self, tr):
500 b"delay visibility of index updates to other readers"
488 b"delay visibility of index updates to other readers"
501
489
502 if not self._delayed:
490 if not self._delayed:
503 if len(self) == 0:
491 if len(self) == 0:
504 self._divert = True
492 self._divert = True
505 if self._realopener.exists(self.indexfile + b'.a'):
493 if self._realopener.exists(self.indexfile + b'.a'):
506 self._realopener.unlink(self.indexfile + b'.a')
494 self._realopener.unlink(self.indexfile + b'.a')
507 self.opener = _divertopener(self._realopener, self.indexfile)
495 self.opener = _divertopener(self._realopener, self.indexfile)
508 else:
496 else:
509 self._delaybuf = []
497 self._delaybuf = []
510 self.opener = _delayopener(
498 self.opener = _delayopener(
511 self._realopener, self.indexfile, self._delaybuf
499 self._realopener, self.indexfile, self._delaybuf
512 )
500 )
513 self._delayed = True
501 self._delayed = True
514 tr.addpending(b'cl-%i' % id(self), self._writepending)
502 tr.addpending(b'cl-%i' % id(self), self._writepending)
515 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
503 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
516
504
517 def _finalize(self, tr):
505 def _finalize(self, tr):
518 b"finalize index updates"
506 b"finalize index updates"
519 self._delayed = False
507 self._delayed = False
520 self.opener = self._realopener
508 self.opener = self._realopener
521 # move redirected index data back into place
509 # move redirected index data back into place
522 if self._divert:
510 if self._divert:
523 assert not self._delaybuf
511 assert not self._delaybuf
524 tmpname = self.indexfile + b".a"
512 tmpname = self.indexfile + b".a"
525 nfile = self.opener.open(tmpname)
513 nfile = self.opener.open(tmpname)
526 nfile.close()
514 nfile.close()
527 self.opener.rename(tmpname, self.indexfile, checkambig=True)
515 self.opener.rename(tmpname, self.indexfile, checkambig=True)
528 elif self._delaybuf:
516 elif self._delaybuf:
529 fp = self.opener(self.indexfile, b'a', checkambig=True)
517 fp = self.opener(self.indexfile, b'a', checkambig=True)
530 fp.write(b"".join(self._delaybuf))
518 fp.write(b"".join(self._delaybuf))
531 fp.close()
519 fp.close()
532 self._delaybuf = None
520 self._delaybuf = None
533 self._divert = False
521 self._divert = False
534 # split when we're done
522 # split when we're done
535 self._enforceinlinesize(tr)
523 self._enforceinlinesize(tr)
536
524
537 def _writepending(self, tr):
525 def _writepending(self, tr):
538 b"create a file containing the unfinalized state for pretxnchangegroup"
526 b"create a file containing the unfinalized state for pretxnchangegroup"
539 if self._delaybuf:
527 if self._delaybuf:
540 # make a temporary copy of the index
528 # make a temporary copy of the index
541 fp1 = self._realopener(self.indexfile)
529 fp1 = self._realopener(self.indexfile)
542 pendingfilename = self.indexfile + b".a"
530 pendingfilename = self.indexfile + b".a"
543 # register as a temp file to ensure cleanup on failure
531 # register as a temp file to ensure cleanup on failure
544 tr.registertmp(pendingfilename)
532 tr.registertmp(pendingfilename)
545 # write existing data
533 # write existing data
546 fp2 = self._realopener(pendingfilename, b"w")
534 fp2 = self._realopener(pendingfilename, b"w")
547 fp2.write(fp1.read())
535 fp2.write(fp1.read())
548 # add pending data
536 # add pending data
549 fp2.write(b"".join(self._delaybuf))
537 fp2.write(b"".join(self._delaybuf))
550 fp2.close()
538 fp2.close()
551 # switch modes so finalize can simply rename
539 # switch modes so finalize can simply rename
552 self._delaybuf = None
540 self._delaybuf = None
553 self._divert = True
541 self._divert = True
554 self.opener = _divertopener(self._realopener, self.indexfile)
542 self.opener = _divertopener(self._realopener, self.indexfile)
555
543
556 if self._divert:
544 if self._divert:
557 return True
545 return True
558
546
559 return False
547 return False
560
548
561 def _enforceinlinesize(self, tr, fp=None):
549 def _enforceinlinesize(self, tr, fp=None):
562 if not self._delayed:
550 if not self._delayed:
563 revlog.revlog._enforceinlinesize(self, tr, fp)
551 revlog.revlog._enforceinlinesize(self, tr, fp)
564
552
def read(self, node):
    """Obtain data from a parsed changelog revision.

    Returns a 6-tuple of:

    - manifest node in binary
    - author/user as a localstr
    - date as a 2-tuple of (time, timezone)
    - list of files
    - commit message as a localstr
    - dict of extra metadata

    Unless you need to access all fields, consider calling
    ``changelogrevision`` instead, as it is faster for partial object
    access.
    """
    d, s = self._revisiondata(node)
    # the third argument tells changelogrevision whether copy metadata
    # lives in the sidedata area rather than in ``extra``
    c = changelogrevision(
        d, s, self._copiesstorage == b'changeset-sidedata'
    )
    return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
586
574
def changelogrevision(self, nodeorrev):
    """Obtain a ``changelogrevision`` for a node or revision.

    Preferred over ``read()`` when only a few fields are needed, since
    the returned object parses fields lazily.
    """
    text, sidedata = self._revisiondata(nodeorrev)
    return changelogrevision(
        text, sidedata, self._copiesstorage == b'changeset-sidedata'
    )
593
581
def readfiles(self, node):
    """
    short version of read that only returns the files modified by the cset
    """
    raw = self.revision(node)
    if not raw:
        return []
    # A changelog entry is b"manifest\nuser\ndate\nfile1\n...\n\ndesc":
    # everything before the first blank line, minus the three fixed
    # header fields, is the file list.
    header = raw[: raw.index(b"\n\n")]
    return header.split(b'\n')[3:]
604
592
def add(
    self,
    manifest,
    files,
    desc,
    transaction,
    p1,
    p2,
    user,
    date=None,
    extra=None,
    p1copies=None,
    p2copies=None,
    filesadded=None,
    filesremoved=None,
):
    """Add a new revision to the changelog.

    Builds the canonical changelog text (manifest hash, user, date+extra,
    sorted file list, blank line, description), encodes copy metadata into
    ``extra`` or sidedata depending on the configured copies storage, and
    appends the revision.  Returns the new node.

    Raises ``error.StorageError`` for an empty/multiline username or a
    reserved branch name.
    """
    # Convert to UTF-8 encoded bytestrings as the very first
    # thing: calling any method on a localstr object will turn it
    # into a str object and the cached UTF-8 string is thus lost.
    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    user = user.strip()
    # An empty username or a username with a "\n" will make the
    # revision text contain two "\n\n" sequences -> corrupt
    # repository since read cannot unpack the revision.
    if not user:
        raise error.StorageError(_(b"empty username"))
    if b"\n" in user:
        raise error.StorageError(
            _(b"username %r contains a newline") % pycompat.bytestr(user)
        )

    desc = stripdesc(desc)

    if date:
        parseddate = b"%d %d" % dateutil.parsedate(date)
    else:
        parseddate = b"%d %d" % dateutil.makedate()
    if extra:
        branch = extra.get(b"branch")
        # "default" is implicit and never stored explicitly
        if branch in (b"default", b""):
            del extra[b"branch"]
        elif branch in (b".", b"null", b"tip"):
            raise error.StorageError(
                _(b'the name \'%s\' is reserved') % branch
            )
    sortedfiles = sorted(files)
    sidedata = None
    if extra is not None:
        # copy metadata keys are managed below; never trust caller-supplied
        # values for them
        for name in (
            b'p1copies',
            b'p2copies',
            b'filesadded',
            b'filesremoved',
        ):
            extra.pop(name, None)
    if p1copies is not None:
        p1copies = copies.encodecopies(sortedfiles, p1copies)
    if p2copies is not None:
        p2copies = copies.encodecopies(sortedfiles, p2copies)
    if filesadded is not None:
        filesadded = copies.encodefileindices(sortedfiles, filesadded)
    if filesremoved is not None:
        filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
    if self._copiesstorage == b'extra':
        extrasentries = p1copies, p2copies, filesadded, filesremoved
        if extra is None and any(x is not None for x in extrasentries):
            extra = {}
        if p1copies is not None:
            extra[b'p1copies'] = p1copies
        if p2copies is not None:
            extra[b'p2copies'] = p2copies
        if filesadded is not None:
            extra[b'filesadded'] = filesadded
        if filesremoved is not None:
            extra[b'filesremoved'] = filesremoved
    elif self._copiesstorage == b'changeset-sidedata':
        sidedata = {}
        if p1copies:
            sidedata[sidedatamod.SD_P1COPIES] = p1copies
        if p2copies:
            sidedata[sidedatamod.SD_P2COPIES] = p2copies
        if filesadded:
            sidedata[sidedatamod.SD_FILESADDED] = filesadded
        if filesremoved:
            sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
        if not sidedata:
            sidedata = None

    if extra:
        extra = encodeextra(extra)
        parseddate = b"%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
    text = b"\n".join(l)
    return self.addrevision(
        text, transaction, len(self), p1, p2, sidedata=sidedata
    )
702
690
def branchinfo(self, rev):
    """return the branch name and open/close state of a revision

    This function exists because creating a changectx object
    just to access this is costly."""
    # index 5 of read()'s 6-tuple is the ``extra`` dict
    extra = self.read(rev)[5]
    return encoding.tolocal(extra.get(b"branch")), b'close' in extra
710
698
def _nodeduplicatecallback(self, transaction, node):
    # keep track of revisions that got "re-added", e.g. unbundle of a
    # known revision.
    #
    # We track them in a list to preserve their order from the source bundle
    duplicates = transaction.changes.setdefault(b'revduplicates', [])
    duplicates.append(self.rev(node))
@@ -1,356 +1,370 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .node import nullrev
15 from .pycompat import (
15 from .pycompat import (
16 delattr,
16 delattr,
17 getattr,
17 getattr,
18 setattr,
18 setattr,
19 )
19 )
20 from . import (
20 from . import (
21 obsolete,
21 obsolete,
22 phases,
22 phases,
23 pycompat,
23 pycompat,
24 revlog,
24 tags as tagsmod,
25 tags as tagsmod,
25 util,
26 util,
26 )
27 )
27 from .utils import repoviewutil
28 from .utils import repoviewutil
28
29
29
30
def hideablerevs(repo):
    """Revision candidates to be hidden

    This is a standalone function to allow extensions to wrap it.

    Because we use the set of immutable changesets as a fallback subset in
    branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
    "public" changesets as "hideable". Doing so would break multiple code
    assertions and lead to crashes."""
    obsoletes = obsolete.getrevs(repo, b'obsolete')
    # internal-phase changesets are never meant to be shown to users
    internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
    internals = frozenset(internals)
    return obsoletes | internals
43
44
44
45
def pinnedrevs(repo):
    """revisions blocking hidden changesets from being filtered
    """

    cl = repo.changelog
    pinned = set()
    # working directory parents must stay visible
    pinned.update([par.rev() for par in repo[None].parents()])
    # bookmarked revisions must stay visible
    pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])

    # local tags also pin their targets (skip tags whose node is unknown)
    tags = {}
    tagsmod.readlocaltags(repo.ui, repo, tags, {})
    if tags:
        rev, nodemap = cl.rev, cl.nodemap
        pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
    return pinned
60
61
61
62
def _revealancestors(pfunc, hidden, revs):
    """reveals contiguous chains of hidden ancestors of 'revs' by removing them
    from 'hidden'

    - pfunc(r): a funtion returning parent of 'r',
    - hidden: the (preliminary) hidden revisions, to be updated
    - revs: iterable of revnum,

    (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
    *not* revealed)
    """
    # depth-first walk up the ancestry, stopping at revisions that are
    # already visible (not in ``hidden``)
    pending = list(revs)
    while pending:
        rev = pending.pop()
        for parent in pfunc(rev):
            if parent == nullrev or parent not in hidden:
                continue
            hidden.remove(parent)
            pending.append(parent)
79
80
80
81
def computehidden(repo, visibilityexceptions=None):
    """compute the set of hidden revision to filter

    During most operation hidden should be filtered."""
    assert not repo.changelog.filteredrevs

    hidden = hideablerevs(repo)
    if hidden:
        # pinned revisions (wdir parents, bookmarks, local tags) and any
        # caller-provided exceptions must stay visible
        hidden = set(hidden - pinnedrevs(repo))
        if visibilityexceptions:
            hidden -= visibilityexceptions
        pfunc = repo.changelog.parentrevs
        mutable = repo._phasecache.getrevset(repo, phases.mutablephases)

        # ancestors of visible mutable revisions cannot be hidden either
        visible = mutable - hidden
        _revealancestors(pfunc, hidden, visible)
    return frozenset(hidden)
98
99
99
100
def computesecret(repo, visibilityexceptions=None):
    """compute the set of revision that can never be exposed through hgweb

    Changeset in the secret phase (or above) should stay unaccessible."""
    assert not repo.changelog.filteredrevs
    secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
    return frozenset(secrets)
107
108
108
109
def computeunserved(repo, visibilityexceptions=None):
    """compute the set of revision that should be filtered when used a server

    Secret and hidden changeset should not pretend to be here."""
    assert not repo.changelog.filteredrevs
    # fast path in simple case to avoid impact of non optimised code
    hiddens = filterrevs(repo, b'visible')
    secrets = filterrevs(repo, b'served.hidden')
    if secrets:
        return frozenset(hiddens | secrets)
    else:
        # no secret revisions: reuse the cached frozenset directly
        return hiddens
121
122
122
123
def computemutable(repo, visibilityexceptions=None):
    """compute the set of revisions to filter for the 'immutable' view

    Every non-public (mutable) revision is filtered out."""
    assert not repo.changelog.filteredrevs
    # fast check to avoid revset call on huge repo
    if any(repo._phasecache.phaseroots[1:]):
        getphase = repo._phasecache.phase
        # 'base' already filters everything at or above the first mutable
        # revision; refine it to the revisions whose phase is non-public
        maymutable = filterrevs(repo, b'base')
        return frozenset(r for r in maymutable if getphase(repo, r))
    return frozenset()
131
132
132
133
def computeimpactable(repo, visibilityexceptions=None):
    """Everything impactable by mutable revision

    The immutable filter still has some chance to get invalidated. This will
    happen when:

    - you garbage collect hidden changesets,
    - public phase is moved backward,
    - something is changed in the filtering (this could be fixed)

    This filters out any mutable changeset and any public changeset that may
    be impacted by something happening to a mutable revision.

    This is achieved by filtering everything with a revision number equal to
    or higher than the first mutable changeset."""
    assert not repo.changelog.filteredrevs
    cl = repo.changelog
    firstmutable = len(cl)
    # phaseroots[0] is the public phase; only non-public roots matter
    for roots in repo._phasecache.phaseroots[1:]:
        if roots:
            firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
    # protect from nullrev root
    firstmutable = max(0, firstmutable)
    return frozenset(pycompat.xrange(firstmutable, len(cl)))
157
158
158
159
# function to compute filtered set
#
# When adding a new filter you MUST update the table at:
#    mercurial.utils.repoviewutil.subsettable
# Otherwise your filter will have to recompute all its branches cache
# from scratch (very slow).
filtertable = {
    b'visible': computehidden,
    b'visible-hidden': computehidden,
    b'served.hidden': computesecret,
    b'served': computeunserved,
    b'immutable': computemutable,
    b'base': computeimpactable,
}

# snapshot of the built-in filter names above; ``extrafilter`` derives
# combined variants from these
_basefiltername = list(filtertable)
175
176
176
177
def extrafilter(ui):
    """initialize extra filter and return its id

    If extra filtering is configured, we make sure the associated filtered
    views are declared and return the associated id.

    Returns ``None`` when ``experimental.extra-filter-revs`` is not set,
    otherwise a 12-character hex id derived from the configured revset.
    """
    frevs = ui.config(b'experimental', b'extra-filter-revs')
    if frevs is None:
        return None

    fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]

    combine = lambda fname: fname + b'%' + fid

    subsettable = repoviewutil.subsettable

    if combine(b'base') not in filtertable:

        def makeextrafilteredrevs(name):
            # Bind ``name`` eagerly through this factory. Defining the
            # closure directly in the loop below would late-bind ``name``
            # to its final loop value, making every combined filter use
            # the last base filter's revisions.
            def extrafilteredrevs(repo, *args, **kwargs):
                baserevs = filtertable[name](repo, *args, **kwargs)
                extrarevs = frozenset(repo.revs(frevs))
                return baserevs | extrarevs

            return extrafilteredrevs

        for name in _basefiltername:
            filtertable[combine(name)] = makeextrafilteredrevs(name)
            if name in subsettable:
                subsettable[combine(name)] = combine(subsettable[name])
    return fid
205
206
206
207
def filterrevs(repo, filtername, visibilityexceptions=None):
    """returns set of filtered revision for this filter name

    visibilityexceptions is a set of revs which are exceptions for
    hidden-state and must be visible. They are dynamic and hence we should
    not cache the result"""
    if filtername not in repo.filteredrevcache:
        func = filtertable[filtername]
        if visibilityexceptions:
            # BUG FIX: the filter functions expect the unfiltered repo
            # object; the previous code passed the bound ``unfiltered``
            # method itself (missing call parentheses), which would crash
            # any filter function on attribute access.
            return func(repo.unfiltered(), visibilityexceptions)
        repo.filteredrevcache[filtername] = func(repo.unfiltered())
    return repo.filteredrevcache[filtername]
219
220
220
221
def wrapchangelog(unfichangelog, filteredrevs):
    """Return a shallow copy of ``unfichangelog`` hiding ``filteredrevs``.

    The copy's class is swapped for a dynamically created subclass that
    overrides the revision-access primitives to skip filtered revisions.
    The returned changelog must not be used for writing.
    """
    cl = copy.copy(unfichangelog)
    cl.filteredrevs = filteredrevs

    class filteredchangelog(cl.__class__):
        def tiprev(self):
            """filtered version of revlog.tiprev"""
            # walk down from the unfiltered tip; -2 sentinel yields
            # nullrev (-1) when everything is filtered
            for i in pycompat.xrange(len(self) - 1, -2, -1):
                if i not in self.filteredrevs:
                    return i

        def __contains__(self, rev):
            """filtered version of revlog.__contains__"""
            return 0 <= rev < len(self) and rev not in self.filteredrevs

        def __iter__(self):
            """filtered version of revlog.__iter__"""
            if len(self.filteredrevs) == 0:
                # nothing filtered: fall back to the fast base iterator
                return revlog.revlog.__iter__(self)

            def filterediter():
                for i in pycompat.xrange(len(self)):
                    if i not in self.filteredrevs:
                        yield i

            return filterediter()

    cl.__class__ = filteredchangelog

    return cl
239
253
240
254
class repoview(object):
    """Provide a read/write view of a repo through a filtered changelog

    This object is used to access a filtered version of a repository without
    altering the original repository object itself. We can not alter the
    original object for two main reasons:
    - It prevents the use of a repo with multiple filters at the same time. In
      particular when multiple threads are involved.
    - It makes scope of the filtering harder to control.

    This object behaves very closely to the original repository. All attribute
    operations are done on the original repository:
    - An access to `repoview.someattr` actually returns `repo.someattr`,
    - A write to `repoview.someattr` actually sets value of `repo.someattr`,
    - A deletion of `repoview.someattr` actually drops `someattr`
      from `repo.__dict__`.

    The only exception is the `changelog` property. It is overridden to return
    a (surface) copy of `repo.changelog` with some revisions filtered. The
    `filtername` attribute of the view control the revisions that need to be
    filtered. (the fact the changelog is copied is an implementation detail).

    Unlike attributes, this object intercepts all method calls. This means that
    all methods are run on the `repoview` object with the filtered `changelog`
    property. For this purpose the simple `repoview` class must be mixed with
    the actual class of the repository. This ensures that the resulting
    `repoview` object have the very same methods than the repo object. This
    leads to the property below.

        repoview.method() --> repo.__class__.method(repoview)

    The inheritance has to be done dynamically because `repo` can be of any
    subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
    """

    def __init__(self, repo, filtername, visibilityexceptions=None):
        # object.__setattr__ is used throughout because our own
        # __setattr__ forwards writes to the proxied repository
        object.__setattr__(self, r'_unfilteredrepo', repo)
        object.__setattr__(self, r'filtername', filtername)
        object.__setattr__(self, r'_clcachekey', None)
        object.__setattr__(self, r'_clcache', None)
        # revs which are exceptions and must not be hidden
        object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)

    # not a propertycache on purpose we shall implement a proper cache later
    @property
    def changelog(self):
        """return a filtered version of the changeset

        this changelog must not be used for writing"""
        # some cache may be implemented later
        unfi = self._unfilteredrepo
        unfichangelog = unfi.changelog
        # bypass call to changelog.method
        unfiindex = unfichangelog.index
        unfilen = len(unfiindex)
        unfinode = unfiindex[unfilen - 1][7]
        with util.timedcm('repo filter for %s', self.filtername):
            revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
        cl = self._clcache
        newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
        # if cl.index is not unfiindex, unfi.changelog would be
        # recreated, and our clcache refers to garbage object
        if cl is not None and (
            cl.index is not unfiindex or newkey != self._clcachekey
        ):
            cl = None
        # could have been made None by the previous if
        if cl is None:
            cl = wrapchangelog(unfichangelog, revs)
            object.__setattr__(self, r'_clcache', cl)
            object.__setattr__(self, r'_clcachekey', newkey)
        return cl

    def unfiltered(self):
        """Return an unfiltered version of a repo"""
        return self._unfilteredrepo

    def filtered(self, name, visibilityexceptions=None):
        """Return a filtered version of a repository"""
        if name == self.filtername and not visibilityexceptions:
            return self
        return self.unfiltered().filtered(name, visibilityexceptions)

    def __repr__(self):
        return r'<%s:%s %r>' % (
            self.__class__.__name__,
            pycompat.sysstr(self.filtername),
            self.unfiltered(),
        )

    # all attribute accesses are forwarded to the proxied repo
    def __getattr__(self, attr):
        return getattr(self._unfilteredrepo, attr)

    def __setattr__(self, attr, value):
        return setattr(self._unfilteredrepo, attr, value)

    def __delattr__(self, attr):
        return delattr(self._unfilteredrepo, attr)
340
354
341
355
# Python <3.4 easily leaks types via __mro__. See
# https://bugs.python.org/issue17950. We cache dynamically created types
# so they won't be leaked on every invocation of repo.filtered().
# Keyed weakly by the base repo class: when a base class goes away, its
# cached filtered subclass can be collected too.
_filteredrepotypes = weakref.WeakKeyDictionary()
346
360
347
361
def newtype(base):
    """Create a new type with the repoview mixin and the given base class"""
    # EAFP: the cache hit is the common case after the first call for a
    # given base class.
    try:
        return _filteredrepotypes[base]
    except KeyError:

        class filteredrepo(repoview, base):
            pass

        _filteredrepotypes[base] = filteredrepo
        return filteredrepo
General Comments 0
You need to be logged in to leave comments. Login now