##// END OF EJS Templates
repoview: move changelog.parentrevs() override to filteredchangelog...
Martin von Zweigbergk -
r43757:e1dae2ef default
parent child Browse files
Show More
@@ -1,636 +1,630 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
# Default 'extra' mapping for a changelog entry: every changeset has a branch,
# and entries that record none fall back to b'default'.
_defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Undo _string_escape(); see its doctest for a round-trip example."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag every escaped
        # backslash with a temporary \n marker so that the b'\\0' replace
        # below cannot match across an escaped backslash.  The marker is safe
        # because escaped input contains no literal newlines (guaranteed by
        # _string_escape).
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    # the remaining escapes (\\, \n, \r, ...) are handled generically
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """Parse a \\0-separated, escaped 'extra' blob into a dict.

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # start from the defaults so a missing branch key still resolves
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Serialize an 'extra' dict into the \\0-separated escaped form."""
    # keys must be sorted to produce a deterministic changelog entry
    escaped = (
        _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d)
    )
    return b"\0".join(escaped)
90
90
91
91
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
95
95
96
96
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # buf holds the not-yet-written chunks; it is shared with the caller
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        # size of the real on-disk file; virtual offsets beyond this point
        # address self.data instead
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are buffered on purpose; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            doff = self.offset - self.size
            # consolidate the buffered chunks into a single bytes object so
            # we can slice it
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            if count < 0:
                # unbounded read: take the whole buffered tail.  Slicing with
                # ``doff + count`` (i.e. a negative end index) would silently
                # drop the final byte(s) of the buffer.
                s = self.data[0][doff:]
            else:
                s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
165
165
166
166
167 def _divertopener(opener, target):
167 def _divertopener(opener, target):
168 """build an opener that writes in 'target.a' instead of 'target'"""
168 """build an opener that writes in 'target.a' instead of 'target'"""
169
169
170 def _divert(name, mode=b'r', checkambig=False):
170 def _divert(name, mode=b'r', checkambig=False):
171 if name != target:
171 if name != target:
172 return opener(name, mode)
172 return opener(name, mode)
173 return opener(name + b".a", mode)
173 return opener(name + b".a", mode)
174
174
175 return _divert
175 return _divert
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False):
181 def _delay(name, mode=b'r', checkambig=False):
182 if name != target:
182 if name != target:
183 return opener(name, mode)
183 return opener(name, mode)
184 return appender(opener, name, mode, buf)
184 return appender(opener, name, mode, buf)
185
185
186 return _delay
186 return _delay
187
187
188
188
@attr.s
class _changelogrevision(object):
    """Plain attrs data holder mirroring changelogrevision's attributes.

    Returned by changelogrevision.__new__ for the empty-text case so callers
    always receive an object exposing the same fields.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
203
203
204
204
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # Parsing happens here rather than in an __init__ so the empty-input
        # case above can return a different type while saving a function call.

        # format used:
        # nodeid\n : manifest node in ascii
        # user\n : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        # : extra is metadata, encoded and separated by '\0'
        # : older versions ignore it
        # files\n\n : files modified by the cset, no \n or \r allowed
        # (.*) : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # locate the first three newlines, one per header field
        positions = []
        pos = -1
        for _ in range(3):
            pos = text.index(b'\n', pos + 1)
            positions.append(pos)
        nl1, nl2, nl3 = positions

        # The list of files may be empty, in which case nl3 is already the
        # first newline of the double newline preceding the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # first header line is the manifest node in hex
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        start, end = self._offsets[0], self._offsets[1]
        return encoding.tolocal(self._text[start + 1 : end])

    @property
    def _rawdate(self):
        start, end = self._offsets[1], self._offsets[2]
        dateextra = self._text[start + 1 : end]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        start, end = self._offsets[1], self._offsets[2]
        fields = self._text[start + 1 : end].split(b' ', 2)
        if len(fields) == 3:
            return fields[2]
        return None

    @property
    def date(self):
        raw = self._rawdate
        seconds = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            tz = int(raw[1])
        except ValueError:
            tz = 0
        return seconds, tz

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra
        return decodeextra(raw)

    @property
    def files(self):
        start, end = self._offsets[2], self._offsets[3]
        if start == end:
            # no files recorded for this changeset
            return []
        return self._text[start + 1 : end].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            raw = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not raw:
                return []
        else:
            raw = self.extra.get(b'filesadded')
            if raw is None:
                return None
        return copies.decodefileindices(self.files, raw)

    @property
    def filesremoved(self):
        if self._cpsd:
            raw = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not raw:
                return []
        else:
            raw = self.extra.get(b'filesremoved')
            if raw is None:
                return None
        return copies.decodefileindices(self.files, raw)

    @property
    def p1copies(self):
        if self._cpsd:
            raw = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not raw:
                return {}
        else:
            raw = self.extra.get(b'p1copies')
            if raw is None:
                return None
        return copies.decodecopies(self.files, raw)

    @property
    def p2copies(self):
        if self._cpsd:
            raw = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not raw:
                return {}
        else:
            raw = self.extra.get(b'p2copies')
            if raw is None:
                return None
        return copies.decodecopies(self.files, raw)

    @property
    def description(self):
        # skip the double newline separating files from the description
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
361
361
362
362
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # opener for real (non-delayed) writes; ``self.opener`` may be
        # swapped for a diverting/delaying one by delayupdate() below
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden by the active repoview filter; consulted by the
        # filtered accessors such as flags()
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
406
406
407 def parentrevs(self, rev):
408 """filtered version of revlog.parentrevs"""
409 if rev in self.filteredrevs:
410 raise error.FilteredIndexError(rev)
411 return super(changelog, self).parentrevs(rev)
412
413 def flags(self, rev):
407 def flags(self, rev):
414 """filtered version of revlog.flags"""
408 """filtered version of revlog.flags"""
415 if rev in self.filteredrevs:
409 if rev in self.filteredrevs:
416 raise error.FilteredIndexError(rev)
410 raise error.FilteredIndexError(rev)
417 return super(changelog, self).flags(rev)
411 return super(changelog, self).flags(rev)
418
412
    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert all writes to a 00changelog.i.a
                # sibling which _finalize() will rename into place
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: buffer appended entries in memory
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        # keyed by id(self) so several changelog objects can share one
        # transaction without clobbering each other's callbacks
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
436
430
    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            # open/close first to fail early if the pending file is missing
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered entries to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
        self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
456
450
    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # True tells the transaction a pending file exists for hooks to read
        if self._divert:
            return True

        return False
480
474
481 def _enforceinlinesize(self, tr, fp=None):
475 def _enforceinlinesize(self, tr, fp=None):
482 if not self._delayed:
476 if not self._delayed:
483 revlog.revlog._enforceinlinesize(self, tr, fp)
477 revlog.revlog._enforceinlinesize(self, tr, fp)
484
478
485 def read(self, node):
479 def read(self, node):
486 """Obtain data from a parsed changelog revision.
480 """Obtain data from a parsed changelog revision.
487
481
488 Returns a 6-tuple of:
482 Returns a 6-tuple of:
489
483
490 - manifest node in binary
484 - manifest node in binary
491 - author/user as a localstr
485 - author/user as a localstr
492 - date as a 2-tuple of (time, timezone)
486 - date as a 2-tuple of (time, timezone)
493 - list of files
487 - list of files
494 - commit message as a localstr
488 - commit message as a localstr
495 - dict of extra metadata
489 - dict of extra metadata
496
490
497 Unless you need to access all fields, consider calling
491 Unless you need to access all fields, consider calling
498 ``changelogrevision`` instead, as it is faster for partial object
492 ``changelogrevision`` instead, as it is faster for partial object
499 access.
493 access.
500 """
494 """
501 d, s = self._revisiondata(node)
495 d, s = self._revisiondata(node)
502 c = changelogrevision(
496 c = changelogrevision(
503 d, s, self._copiesstorage == b'changeset-sidedata'
497 d, s, self._copiesstorage == b'changeset-sidedata'
504 )
498 )
505 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
499 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
506
500
507 def changelogrevision(self, nodeorrev):
501 def changelogrevision(self, nodeorrev):
508 """Obtain a ``changelogrevision`` for a node or revision."""
502 """Obtain a ``changelogrevision`` for a node or revision."""
509 text, sidedata = self._revisiondata(nodeorrev)
503 text, sidedata = self._revisiondata(nodeorrev)
510 return changelogrevision(
504 return changelogrevision(
511 text, sidedata, self._copiesstorage == b'changeset-sidedata'
505 text, sidedata, self._copiesstorage == b'changeset-sidedata'
512 )
506 )
513
507
514 def readfiles(self, node):
508 def readfiles(self, node):
515 """
509 """
516 short version of read that only returns the files modified by the cset
510 short version of read that only returns the files modified by the cset
517 """
511 """
518 text = self.revision(node)
512 text = self.revision(node)
519 if not text:
513 if not text:
520 return []
514 return []
521 last = text.index(b"\n\n")
515 last = text.index(b"\n\n")
522 l = text[:last].split(b'\n')
516 l = text[:last].split(b'\n')
523 return l[3:]
517 return l[3:]
524
518
525 def add(
519 def add(
526 self,
520 self,
527 manifest,
521 manifest,
528 files,
522 files,
529 desc,
523 desc,
530 transaction,
524 transaction,
531 p1,
525 p1,
532 p2,
526 p2,
533 user,
527 user,
534 date=None,
528 date=None,
535 extra=None,
529 extra=None,
536 p1copies=None,
530 p1copies=None,
537 p2copies=None,
531 p2copies=None,
538 filesadded=None,
532 filesadded=None,
539 filesremoved=None,
533 filesremoved=None,
540 ):
534 ):
541 # Convert to UTF-8 encoded bytestrings as the very first
535 # Convert to UTF-8 encoded bytestrings as the very first
542 # thing: calling any method on a localstr object will turn it
536 # thing: calling any method on a localstr object will turn it
543 # into a str object and the cached UTF-8 string is thus lost.
537 # into a str object and the cached UTF-8 string is thus lost.
544 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
538 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
545
539
546 user = user.strip()
540 user = user.strip()
547 # An empty username or a username with a "\n" will make the
541 # An empty username or a username with a "\n" will make the
548 # revision text contain two "\n\n" sequences -> corrupt
542 # revision text contain two "\n\n" sequences -> corrupt
549 # repository since read cannot unpack the revision.
543 # repository since read cannot unpack the revision.
550 if not user:
544 if not user:
551 raise error.StorageError(_(b"empty username"))
545 raise error.StorageError(_(b"empty username"))
552 if b"\n" in user:
546 if b"\n" in user:
553 raise error.StorageError(
547 raise error.StorageError(
554 _(b"username %r contains a newline") % pycompat.bytestr(user)
548 _(b"username %r contains a newline") % pycompat.bytestr(user)
555 )
549 )
556
550
557 desc = stripdesc(desc)
551 desc = stripdesc(desc)
558
552
559 if date:
553 if date:
560 parseddate = b"%d %d" % dateutil.parsedate(date)
554 parseddate = b"%d %d" % dateutil.parsedate(date)
561 else:
555 else:
562 parseddate = b"%d %d" % dateutil.makedate()
556 parseddate = b"%d %d" % dateutil.makedate()
563 if extra:
557 if extra:
564 branch = extra.get(b"branch")
558 branch = extra.get(b"branch")
565 if branch in (b"default", b""):
559 if branch in (b"default", b""):
566 del extra[b"branch"]
560 del extra[b"branch"]
567 elif branch in (b".", b"null", b"tip"):
561 elif branch in (b".", b"null", b"tip"):
568 raise error.StorageError(
562 raise error.StorageError(
569 _(b'the name \'%s\' is reserved') % branch
563 _(b'the name \'%s\' is reserved') % branch
570 )
564 )
571 sortedfiles = sorted(files)
565 sortedfiles = sorted(files)
572 sidedata = None
566 sidedata = None
573 if extra is not None:
567 if extra is not None:
574 for name in (
568 for name in (
575 b'p1copies',
569 b'p1copies',
576 b'p2copies',
570 b'p2copies',
577 b'filesadded',
571 b'filesadded',
578 b'filesremoved',
572 b'filesremoved',
579 ):
573 ):
580 extra.pop(name, None)
574 extra.pop(name, None)
581 if p1copies is not None:
575 if p1copies is not None:
582 p1copies = copies.encodecopies(sortedfiles, p1copies)
576 p1copies = copies.encodecopies(sortedfiles, p1copies)
583 if p2copies is not None:
577 if p2copies is not None:
584 p2copies = copies.encodecopies(sortedfiles, p2copies)
578 p2copies = copies.encodecopies(sortedfiles, p2copies)
585 if filesadded is not None:
579 if filesadded is not None:
586 filesadded = copies.encodefileindices(sortedfiles, filesadded)
580 filesadded = copies.encodefileindices(sortedfiles, filesadded)
587 if filesremoved is not None:
581 if filesremoved is not None:
588 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
582 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
589 if self._copiesstorage == b'extra':
583 if self._copiesstorage == b'extra':
590 extrasentries = p1copies, p2copies, filesadded, filesremoved
584 extrasentries = p1copies, p2copies, filesadded, filesremoved
591 if extra is None and any(x is not None for x in extrasentries):
585 if extra is None and any(x is not None for x in extrasentries):
592 extra = {}
586 extra = {}
593 if p1copies is not None:
587 if p1copies is not None:
594 extra[b'p1copies'] = p1copies
588 extra[b'p1copies'] = p1copies
595 if p2copies is not None:
589 if p2copies is not None:
596 extra[b'p2copies'] = p2copies
590 extra[b'p2copies'] = p2copies
597 if filesadded is not None:
591 if filesadded is not None:
598 extra[b'filesadded'] = filesadded
592 extra[b'filesadded'] = filesadded
599 if filesremoved is not None:
593 if filesremoved is not None:
600 extra[b'filesremoved'] = filesremoved
594 extra[b'filesremoved'] = filesremoved
601 elif self._copiesstorage == b'changeset-sidedata':
595 elif self._copiesstorage == b'changeset-sidedata':
602 sidedata = {}
596 sidedata = {}
603 if p1copies:
597 if p1copies:
604 sidedata[sidedatamod.SD_P1COPIES] = p1copies
598 sidedata[sidedatamod.SD_P1COPIES] = p1copies
605 if p2copies:
599 if p2copies:
606 sidedata[sidedatamod.SD_P2COPIES] = p2copies
600 sidedata[sidedatamod.SD_P2COPIES] = p2copies
607 if filesadded:
601 if filesadded:
608 sidedata[sidedatamod.SD_FILESADDED] = filesadded
602 sidedata[sidedatamod.SD_FILESADDED] = filesadded
609 if filesremoved:
603 if filesremoved:
610 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
604 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
611 if not sidedata:
605 if not sidedata:
612 sidedata = None
606 sidedata = None
613
607
614 if extra:
608 if extra:
615 extra = encodeextra(extra)
609 extra = encodeextra(extra)
616 parseddate = b"%s %s" % (parseddate, extra)
610 parseddate = b"%s %s" % (parseddate, extra)
617 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
611 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
618 text = b"\n".join(l)
612 text = b"\n".join(l)
619 return self.addrevision(
613 return self.addrevision(
620 text, transaction, len(self), p1, p2, sidedata=sidedata
614 text, transaction, len(self), p1, p2, sidedata=sidedata
621 )
615 )
622
616
623 def branchinfo(self, rev):
617 def branchinfo(self, rev):
624 """return the branch name and open/close state of a revision
618 """return the branch name and open/close state of a revision
625
619
626 This function exists because creating a changectx object
620 This function exists because creating a changectx object
627 just to access this is costly."""
621 just to access this is costly."""
628 extra = self.read(rev)[5]
622 extra = self.read(rev)[5]
629 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
623 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
630
624
631 def _nodeduplicatecallback(self, transaction, node):
625 def _nodeduplicatecallback(self, transaction, node):
632 # keep track of revisions that got "re-added", eg: unbunde of know rev.
626 # keep track of revisions that got "re-added", eg: unbunde of know rev.
633 #
627 #
634 # We track them in a list to preserve their order from the source bundle
628 # We track them in a list to preserve their order from the source bundle
635 duplicates = transaction.changes.setdefault(b'revduplicates', [])
629 duplicates = transaction.changes.setdefault(b'revduplicates', [])
636 duplicates.append(self.rev(node))
630 duplicates.append(self.rev(node))
@@ -1,442 +1,448 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 hex,
16 hex,
17 nullrev,
17 nullrev,
18 )
18 )
19 from .pycompat import (
19 from .pycompat import (
20 delattr,
20 delattr,
21 getattr,
21 getattr,
22 setattr,
22 setattr,
23 )
23 )
24 from . import (
24 from . import (
25 error,
25 error,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pycompat,
28 pycompat,
29 revlog,
29 revlog,
30 tags as tagsmod,
30 tags as tagsmod,
31 util,
31 util,
32 )
32 )
33 from .utils import repoviewutil
33 from .utils import repoviewutil
34
34
35
35
def hideablerevs(repo):
    """Revision candidates to be hidden.

    This is a standalone function to allow extensions to wrap it.

    Because we use the set of immutable changesets as a fallback subset in
    branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
    "public" changesets as "hideable". Doing so would break multiple code
    assertions and lead to crashes."""
    # Candidates are obsolete changesets plus internal (local-hidden-phase)
    # changesets; pinning/ancestor-revealing happens in the callers.
    internals = frozenset(
        repo._phasecache.getrevset(repo, phases.localhiddenphases)
    )
    return obsolete.getrevs(repo, b'obsolete') | internals
49
49
50
50
def pinnedrevs(repo):
    """revisions blocking hidden changesets from being filtered"""

    cl = repo.changelog
    # Working-directory parents and bookmarked revisions are always pinned.
    pinned = {parent.rev() for parent in repo[None].parents()}
    pinned.update(cl.rev(node) for node in repo._bookmarks.values())

    # Locally tagged revisions are pinned too (when their node is known).
    localtags = {}
    tagsmod.readlocaltags(repo.ui, repo, localtags, {})
    if localtags:
        torev, nodemap = cl.rev, cl.nodemap
        pinned.update(
            torev(t[0]) for t in localtags.values() if t[0] in nodemap
        )
    return pinned
66
66
67
67
def _revealancestors(pfunc, hidden, revs):
    """reveals contiguous chains of hidden ancestors of 'revs' by removing
    them from 'hidden'

    - pfunc(r): a function returning the parent(s) of 'r',
    - hidden: the (preliminary) hidden revisions, to be updated in place
    - revs: iterable of revnum,

    (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
    *not* revealed)
    """
    # Iterative DFS: walk parent chains, unhiding until we hit a revision
    # that is already visible (or nullrev), which bounds the traversal.
    pending = list(revs)
    while pending:
        for parent in pfunc(pending.pop()):
            if parent != nullrev and parent in hidden:
                hidden.remove(parent)
                pending.append(parent)
85
85
86
86
def computehidden(repo, visibilityexceptions=None):
    """compute the set of hidden revisions to filter

    During most operations hidden should be filtered."""
    # Must be computed on an unfiltered changelog.
    assert not repo.changelog.filteredrevs

    hidden = hideablerevs(repo)
    if hidden:
        hidden = set(hidden - pinnedrevs(repo))
        if visibilityexceptions:
            hidden -= visibilityexceptions
        # A hidden changeset with a visible mutable descendant must be
        # revealed again, so history stays contiguous.
        pfunc = repo.changelog.parentrevs
        mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
        _revealancestors(pfunc, hidden, mutable - hidden)
    return frozenset(hidden)
104
104
105
105
def computesecret(repo, visibilityexceptions=None):
    """compute the set of revisions that can never be exposed through hgweb

    Changesets in the secret phase (or above) should stay unaccessible."""
    assert not repo.changelog.filteredrevs
    return frozenset(
        repo._phasecache.getrevset(repo, phases.remotehiddenphases)
    )
113
113
114
114
def computeunserved(repo, visibilityexceptions=None):
    """compute the set of revisions that should be filtered when used as a
    server

    Secret and hidden changesets should not pretend to be here."""
    assert not repo.changelog.filteredrevs
    # fast path in the simple case to avoid the cost of non-optimised code
    hiddens = filterrevs(repo, b'visible')
    secrets = filterrevs(repo, b'served.hidden')
    if not secrets:
        return hiddens
    return frozenset(hiddens | secrets)
127
127
128
128
def computemutable(repo, visibilityexceptions=None):
    """compute the set of non-public (mutable) revisions"""
    assert not repo.changelog.filteredrevs
    # fast check to avoid a revset call on a huge repo: if no non-public
    # phase has roots, everything is public.
    if not any(repo._phasecache.phaseroots[1:]):
        return frozenset()
    getphase = repo._phasecache.phase
    maymutable = filterrevs(repo, b'base')
    return frozenset(rev for rev in maymutable if getphase(repo, rev))
137
137
138
138
def computeimpactable(repo, visibilityexceptions=None):
    """Everything impactable by mutable revision

    The immutable filter still has some chance to get invalidated. This will
    happen when:

    - you garbage collect hidden changesets,
    - public phase is moved backward,
    - something is changed in the filtering (this could be fixed)

    This filters out any mutable changeset and any public changeset that may
    be impacted by something happening to a mutable revision.

    This is achieved by filtering everything with a revision number equal to
    or higher than the first mutable changeset."""
    assert not repo.changelog.filteredrevs
    cl = repo.changelog
    firstmutable = len(cl)
    for roots in repo._phasecache.phaseroots[1:]:
        if not roots:
            continue
        firstmutable = min(firstmutable, min(cl.rev(node) for node in roots))
    # protect from a nullrev root
    firstmutable = max(0, firstmutable)
    return frozenset(pycompat.xrange(firstmutable, len(cl)))
163
163
164
164
# Functions computing each filtered set, keyed by filter name.
#
# When adding a new filter you MUST update the table at:
#    mercurial.utils.repoviewutil.subsettable
# Otherwise your filter will have to recompute all its branches cache
# from scratch (very slow).
filtertable = {
    b'visible': computehidden,
    b'visible-hidden': computehidden,
    b'served.hidden': computesecret,
    b'served': computeunserved,
    b'immutable': computemutable,
    b'base': computeimpactable,
}

# Names of the built-in filters; extrafilter() derives additional combined
# entries from these at runtime, so snapshot the list here.
_basefiltername = list(filtertable)
181
181
182
182
def extrafilter(ui):
    """initialize extra filter and return its id

    If extra filtering is configured, we make sure the associated filtered
    views are declared and return the associated id.

    Returns None when ``experimental.extra-filter-revs`` is not set.
    """
    frevs = ui.config(b'experimental', b'extra-filter-revs')
    if frevs is None:
        return None

    # Stable short id derived from the revset text, used to name the views.
    fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]

    combine = lambda fname: fname + b'%' + fid

    subsettable = repoviewutil.subsettable

    if combine(b'base') not in filtertable:

        def makeextrafilteredrevs(name):
            # Factory so each generated function binds its own 'name'.
            # Defining the closure directly in the loop below would capture
            # the loop variable late: every combined filter would then
            # dispatch to the *last* base filter instead of its own.
            def extrafilteredrevs(repo, *args, **kwargs):
                baserevs = filtertable[name](repo, *args, **kwargs)
                extrarevs = frozenset(repo.revs(frevs))
                return baserevs | extrarevs

            return extrafilteredrevs

        for name in _basefiltername:
            filtertable[combine(name)] = makeextrafilteredrevs(name)
            if name in subsettable:
                subsettable[combine(name)] = combine(subsettable[name])
    return fid
211
211
212
212
def filterrevs(repo, filtername, visibilityexceptions=None):
    """returns the set of filtered revisions for this filter name

    visibilityexceptions is a set of revs which are exceptions to the
    hidden-state and must stay visible.  They are dynamic, hence the result
    computed with them must not be cached.
    """
    if filtername not in repo.filteredrevcache:
        func = filtertable[filtername]
        if visibilityexceptions:
            # Compute without caching.  The compute functions require an
            # unfiltered repo, so call repo.unfiltered() — passing the bound
            # method itself (as the code previously did) hands the function
            # a method object instead of a repository.
            return func(repo.unfiltered(), visibilityexceptions)
        repo.filteredrevcache[filtername] = func(repo.unfiltered())
    return repo.filteredrevcache[filtername]
225
225
226
226
def wrapchangelog(unfichangelog, filteredrevs):
    """Return a shallow copy of ``unfichangelog`` that hides ``filteredrevs``.

    The copy shares the underlying revlog data; its class is swapped for a
    dynamically created subclass whose accessors refuse (or skip) filtered
    revisions.  The returned changelog must not be used for writing.
    """
    cl = copy.copy(unfichangelog)
    cl.filteredrevs = filteredrevs

    class filteredchangelog(cl.__class__):
        def tiprev(self):
            """filtered version of revlog.tiprev"""
            # Scan backwards; the range deliberately reaches -1 (nullrev)
            # so a fully filtered repo yields nullrev.
            for rev in pycompat.xrange(len(self) - 1, -2, -1):
                if rev not in self.filteredrevs:
                    return rev

        def __contains__(self, rev):
            """filtered version of revlog.__contains__"""
            if rev < 0 or rev >= len(self):
                return False
            return rev not in self.filteredrevs

        def __iter__(self):
            """filtered version of revlog.__iter__"""
            if not self.filteredrevs:
                # nothing filtered: delegate to the plain revlog iterator
                return revlog.revlog.__iter__(self)

            def filterediter():
                for rev in pycompat.xrange(len(self)):
                    if rev not in self.filteredrevs:
                        yield rev

            return filterediter()

        def revs(self, start=0, stop=None):
            """filtered version of revlog.revs"""
            for rev in super(filteredchangelog, self).revs(start, stop):
                if rev not in self.filteredrevs:
                    yield rev

        def _checknofilteredinrevs(self, revs):
            """raise the appropriate error if 'revs' contains a filtered
            revision

            This returns a version of 'revs' to be used thereafter by the
            caller.  In particular, if revs is an iterator, it is converted
            into a set.
            """
            safehasattr = util.safehasattr
            if safehasattr(revs, '__next__'):
                # Note that inspect.isgenerator() is not true for iterators,
                revs = set(revs)

            filtered = self.filteredrevs
            if safehasattr(revs, 'first'):  # smartset
                offenders = revs & filtered
            else:
                offenders = filtered.intersection(revs)

            for rev in offenders:
                raise error.FilteredIndexError(rev)
            return revs

        def headrevs(self, revs=None):
            if revs is None and self.filteredrevs:
                try:
                    return self.index.headrevsfiltered(self.filteredrevs)
                except AttributeError:
                    # AttributeError covers non-c-extension environments and
                    # old c extensions without filter handling.
                    return self._headrevs()

            if self.filteredrevs:
                revs = self._checknofilteredinrevs(revs)
            return super(filteredchangelog, self).headrevs(revs)

        def strip(self, *args, **kwargs):
            # XXX make something better than assert
            # We can't expect proper strip behavior if we are filtered.
            assert not self.filteredrevs
            super(filteredchangelog, self).strip(*args, **kwargs)

        def rev(self, node):
            """filtered version of revlog.rev"""
            r = super(filteredchangelog, self).rev(node)
            if r in self.filteredrevs:
                raise error.FilteredLookupError(
                    hex(node), self.indexfile, _(b'filtered node')
                )
            return r

        def node(self, rev):
            """filtered version of revlog.node"""
            if rev in self.filteredrevs:
                raise error.FilteredIndexError(rev)
            return super(filteredchangelog, self).node(rev)

        def linkrev(self, rev):
            """filtered version of revlog.linkrev"""
            if rev in self.filteredrevs:
                raise error.FilteredIndexError(rev)
            return super(filteredchangelog, self).linkrev(rev)

        def parentrevs(self, rev):
            """filtered version of revlog.parentrevs"""
            if rev in self.filteredrevs:
                raise error.FilteredIndexError(rev)
            return super(filteredchangelog, self).parentrevs(rev)

    cl.__class__ = filteredchangelog

    return cl
325
331
326
332
class repoview(object):
    """Provide a read/write view of a repo through a filtered changelog

    This object is used to access a filtered version of a repository without
    altering the original repository object itself. We can not alter the
    original object for two main reasons:
    - It prevents the use of a repo with multiple filters at the same time. In
      particular when multiple threads are involved.
    - It makes scope of the filtering harder to control.

    This object behaves very closely to the original repository. All attribute
    operations are done on the original repository:
    - An access to `repoview.someattr` actually returns `repo.someattr`,
    - A write to `repoview.someattr` actually sets value of `repo.someattr`,
    - A deletion of `repoview.someattr` actually drops `someattr`
      from `repo.__dict__`.

    The only exception is the `changelog` property. It is overridden to return
    a (surface) copy of `repo.changelog` with some revisions filtered. The
    `filtername` attribute of the view control the revisions that need to be
    filtered. (the fact the changelog is copied is an implementation detail).

    Unlike attributes, this object intercepts all method calls. This means that
    all methods are run on the `repoview` object with the filtered `changelog`
    property. For this purpose the simple `repoview` class must be mixed with
    the actual class of the repository. This ensures that the resulting
    `repoview` object have the very same methods than the repo object. This
    leads to the property below.

        repoview.method() --> repo.__class__.method(repoview)

    The inheritance has to be done dynamically because `repo` can be of any
    subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
    """

    def __init__(self, repo, filtername, visibilityexceptions=None):
        # Our own state must be installed with object.__setattr__ because
        # __setattr__ below forwards every normal assignment to the proxied
        # (unfiltered) repository.
        object.__setattr__(self, r'_unfilteredrepo', repo)
        object.__setattr__(self, r'filtername', filtername)
        object.__setattr__(self, r'_clcachekey', None)
        object.__setattr__(self, r'_clcache', None)
        # revs which are exceptions and must not be hidden
        object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)

    # not a propertycache on purpose we shall implement a proper cache later
    @property
    def changelog(self):
        """return a filtered version of the changeset

        this changelog must not be used for writing"""
        # some cache may be implemented later
        unfi = self._unfilteredrepo
        unfichangelog = unfi.changelog
        # bypass call to changelog.method
        unfiindex = unfichangelog.index
        unfilen = len(unfiindex)
        # node of the current tip, part of the cache key below
        unfinode = unfiindex[unfilen - 1][7]
        with util.timedcm('repo filter for %s', self.filtername):
            revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
        cl = self._clcache
        newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
        # if cl.index is not unfiindex, unfi.changelog would be
        # recreated, and our clcache refers to garbage object
        if cl is not None and (
            cl.index is not unfiindex or newkey != self._clcachekey
        ):
            cl = None
        # could have been made None by the previous if
        if cl is None:
            cl = wrapchangelog(unfichangelog, revs)
            object.__setattr__(self, r'_clcache', cl)
            object.__setattr__(self, r'_clcachekey', newkey)
        return cl

    def unfiltered(self):
        """Return an unfiltered version of a repo"""
        return self._unfilteredrepo

    def filtered(self, name, visibilityexceptions=None):
        """Return a filtered version of a repository"""
        if name == self.filtername and not visibilityexceptions:
            return self
        return self.unfiltered().filtered(name, visibilityexceptions)

    def __repr__(self):
        return r'<%s:%s %r>' % (
            self.__class__.__name__,
            pycompat.sysstr(self.filtername),
            self.unfiltered(),
        )

    # everything access are forwarded to the proxied repo
    def __getattr__(self, attr):
        return getattr(self._unfilteredrepo, attr)

    def __setattr__(self, attr, value):
        return setattr(self._unfilteredrepo, attr, value)

    def __delattr__(self, attr):
        return delattr(self._unfilteredrepo, attr)
426
432
427
433
428 # Python <3.4 easily leaks types via __mro__. See
434 # Python <3.4 easily leaks types via __mro__. See
429 # https://bugs.python.org/issue17950. We cache dynamically created types
435 # https://bugs.python.org/issue17950. We cache dynamically created types
430 # so they won't be leaked on every invocation of repo.filtered().
436 # so they won't be leaked on every invocation of repo.filtered().
431 _filteredrepotypes = weakref.WeakKeyDictionary()
437 _filteredrepotypes = weakref.WeakKeyDictionary()
432
438
433
439
434 def newtype(base):
440 def newtype(base):
435 """Create a new type with the repoview mixin and the given base class"""
441 """Create a new type with the repoview mixin and the given base class"""
436 if base not in _filteredrepotypes:
442 if base not in _filteredrepotypes:
437
443
438 class filteredrepo(repoview, base):
444 class filteredrepo(repoview, base):
439 pass
445 pass
440
446
441 _filteredrepotypes[base] = filteredrepo
447 _filteredrepotypes[base] = filteredrepo
442 return _filteredrepotypes[base]
448 return _filteredrepotypes[base]
General Comments 0
You need to be logged in to leave comments. Login now