##// END OF EJS Templates
repoview: move changelog.rev() override to filteredchangelog...
Martin von Zweigbergk -
r43754:c45195f9 default
parent child Browse files
Show More
@@ -1,657 +1,648 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
# Default changeset "extra" metadata: a changeset with no explicit branch
# recorded belongs to the b'default' branch.  decodeextra() starts from a
# copy of this dict and changelogrevision falls back to it when a revision
# carries no extra field at all.
_defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse ``_string_escape``.

    ``\\0`` needs special handling: a literal backslash pair must not be
    mistaken for the start of an escaped NUL.  Literal pairs are therefore
    temporarily marked with a newline (which cannot occur in escaped text)
    before the NUL substitution, and the markers are stripped afterwards.
    """
    if b'\\0' in text:
        text = (
            text.replace(b'\\\\', b'\\\\\n')
            .replace(b'\\0', b'\0')
            .replace(b'\n', b'')
        )
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """Decode an encoded extra blob back into a dict.

    The input is NUL-separated ``key:value`` entries as produced by
    ``encodeextra``.  Decoding starts from a copy of ``_defaultextra`` so
    the result always carries a branch entry.
    """
    extra = _defaultextra.copy()
    for entry in text.split(b'\0'):
        if not entry:
            continue
        key, value = _string_unescape(entry).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Encode an extra dict as NUL-separated, escaped ``key:value`` pairs.

    Keys are emitted in sorted order so the resulting changelog entry is
    deterministic; the inverse operation is ``decodeextra``.
    """
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d)
    )
90
90
91
91
def stripdesc(desc):
    """Normalize a commit description.

    Trailing whitespace is removed from every line, and blank lines at the
    start and end of the text are dropped.
    """
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
95
95
96
96
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # pending chunks not yet written to the real file
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # virtual position spanning the real file and the buffer
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        # virtual end: on-disk size plus everything buffered so far
        self._end = self.size

    def end(self):
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are deliberately held in memory; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            # absolute
            newpos = offset
        elif whence == 1:
            # relative to the current virtual position
            newpos = self.offset + offset
        elif whence == 2:
            # relative to the virtual end
            newpos = self.end() + offset
        else:
            newpos = self.offset
        self.offset = newpos
        if newpos < self.size:
            # only the on-disk portion needs a real seek
            self.fp.seek(newpos)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        out = b""
        if self.offset < self.size:
            # start in the on-disk portion
            chunk = self.fp.read(count)
            out = chunk
            self.offset += len(chunk)
            if count > 0:
                count -= len(chunk)
        if count != 0:
            # continue (or start) in the in-memory buffer; first collapse
            # the pending chunks into a single bytes object
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            chunk = self.data[0][doff : doff + count]
            self.offset += len(chunk)
            out += chunk
        return out

    def write(self, s):
        payload = bytes(s)
        self.data.append(payload)
        self.offset += len(payload)
        self._end += len(payload)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
165
165
166
166
167 def _divertopener(opener, target):
167 def _divertopener(opener, target):
168 """build an opener that writes in 'target.a' instead of 'target'"""
168 """build an opener that writes in 'target.a' instead of 'target'"""
169
169
170 def _divert(name, mode=b'r', checkambig=False):
170 def _divert(name, mode=b'r', checkambig=False):
171 if name != target:
171 if name != target:
172 return opener(name, mode)
172 return opener(name, mode)
173 return opener(name + b".a", mode)
173 return opener(name + b".a", mode)
174
174
175 return _divert
175 return _divert
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False):
181 def _delay(name, mode=b'r', checkambig=False):
182 if name != target:
182 if name != target:
183 return opener(name, mode)
183 return opener(name, mode)
184 return appender(opener, name, mode, buf)
184 return appender(opener, name, mode, buf)
185
185
186 return _delay
186 return _delay
187
187
188
188
@attr.s
class _changelogrevision(object):
    """Attribute-only stand-in for a parsed changelog revision.

    Returned by ``changelogrevision.__new__`` when there is no revision
    text; every field then carries its default value.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
203
203
204
204
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # no revision text: return a stub carrying only the default
            # extra dict
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        # offsets of the first three header newlines
        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # whether copy metadata is stored in changeset sidedata (as opposed
        # to the extra dict)
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        """Manifest node as binary (decoded from the first header line)."""
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        """Author of the revision, converted to the local encoding."""
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the date line
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # third field of the date line, or None when the revision stores
        # no extra metadata
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        """(time, timezone) tuple; time is a float, timezone an int."""
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        """Decoded extra-metadata dict; defaults when none is stored."""
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        """List of files touched by this changeset (possibly empty)."""
        off = self._offsets
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        """Files added by this changeset, or None when not recorded."""
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        """Files removed by this changeset, or None when not recorded."""
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        """Copies relative to the first parent, or None when not recorded."""
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        """Copies relative to the second parent, or None when not recorded."""
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        """Commit message in the local encoding (text after the blank line)."""
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
361
361
362
362
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        self._realopener = opener
        # state for the delayed-write machinery; see delayupdate(),
        # _writepending() and _finalize() below
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from this changelog; consulted by the filtered
        # accessors (node, linkrev, parentrevs, flags)
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
406
406
    def rev(self, node):
        """filtered version of revlog.rev"""
        r = super(changelog, self).rev(node)
        if r in self.filteredrevs:
            # the node exists but is hidden by the current repoview filter
            raise error.FilteredLookupError(
                hex(node), self.indexfile, _(b'filtered node')
            )
        return r
415
416 def node(self, rev):
407 def node(self, rev):
417 """filtered version of revlog.node"""
408 """filtered version of revlog.node"""
418 if rev in self.filteredrevs:
409 if rev in self.filteredrevs:
419 raise error.FilteredIndexError(rev)
410 raise error.FilteredIndexError(rev)
420 return super(changelog, self).node(rev)
411 return super(changelog, self).node(rev)
421
412
422 def linkrev(self, rev):
413 def linkrev(self, rev):
423 """filtered version of revlog.linkrev"""
414 """filtered version of revlog.linkrev"""
424 if rev in self.filteredrevs:
415 if rev in self.filteredrevs:
425 raise error.FilteredIndexError(rev)
416 raise error.FilteredIndexError(rev)
426 return super(changelog, self).linkrev(rev)
417 return super(changelog, self).linkrev(rev)
427
418
428 def parentrevs(self, rev):
419 def parentrevs(self, rev):
429 """filtered version of revlog.parentrevs"""
420 """filtered version of revlog.parentrevs"""
430 if rev in self.filteredrevs:
421 if rev in self.filteredrevs:
431 raise error.FilteredIndexError(rev)
422 raise error.FilteredIndexError(rev)
432 return super(changelog, self).parentrevs(rev)
423 return super(changelog, self).parentrevs(rev)
433
424
434 def flags(self, rev):
425 def flags(self, rev):
435 """filtered version of revlog.flags"""
426 """filtered version of revlog.flags"""
436 if rev in self.filteredrevs:
427 if rev in self.filteredrevs:
437 raise error.FilteredIndexError(rev)
428 raise error.FilteredIndexError(rev)
438 return super(changelog, self).flags(rev)
429 return super(changelog, self).flags(rev)
439
430
    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: write directly into a diverted '.a'
                # file that _finalize() will rename into place
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    # discard any stale pending file from a prior run
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # non-empty changelog: accumulate new entries in memory
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
457
448
    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            # opening the temp file verifies it exists before the rename
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the in-memory buffer to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
477
468
    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # True signals to the transaction that pending data was written
        if self._divert:
            return True

        return False
491 return False
501
492
502 def _enforceinlinesize(self, tr, fp=None):
493 def _enforceinlinesize(self, tr, fp=None):
503 if not self._delayed:
494 if not self._delayed:
504 revlog.revlog._enforceinlinesize(self, tr, fp)
495 revlog.revlog._enforceinlinesize(self, tr, fp)
505
496
    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

        - manifest node in binary
        - author/user as a localstr
        - date as a 2-tuple of (time, timezone)
        - list of files
        - commit message as a localstr
        - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        # d is the revision text, s its sidedata mapping
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
527
518
528 def changelogrevision(self, nodeorrev):
519 def changelogrevision(self, nodeorrev):
529 """Obtain a ``changelogrevision`` for a node or revision."""
520 """Obtain a ``changelogrevision`` for a node or revision."""
530 text, sidedata = self._revisiondata(nodeorrev)
521 text, sidedata = self._revisiondata(nodeorrev)
531 return changelogrevision(
522 return changelogrevision(
532 text, sidedata, self._copiesstorage == b'changeset-sidedata'
523 text, sidedata, self._copiesstorage == b'changeset-sidedata'
533 )
524 )
534
525
535 def readfiles(self, node):
526 def readfiles(self, node):
536 """
527 """
537 short version of read that only returns the files modified by the cset
528 short version of read that only returns the files modified by the cset
538 """
529 """
539 text = self.revision(node)
530 text = self.revision(node)
540 if not text:
531 if not text:
541 return []
532 return []
542 last = text.index(b"\n\n")
533 last = text.index(b"\n\n")
543 l = text[:last].split(b'\n')
534 l = text[:last].split(b'\n')
544 return l[3:]
535 return l[3:]
545
536
546 def add(
537 def add(
547 self,
538 self,
548 manifest,
539 manifest,
549 files,
540 files,
550 desc,
541 desc,
551 transaction,
542 transaction,
552 p1,
543 p1,
553 p2,
544 p2,
554 user,
545 user,
555 date=None,
546 date=None,
556 extra=None,
547 extra=None,
557 p1copies=None,
548 p1copies=None,
558 p2copies=None,
549 p2copies=None,
559 filesadded=None,
550 filesadded=None,
560 filesremoved=None,
551 filesremoved=None,
561 ):
552 ):
562 # Convert to UTF-8 encoded bytestrings as the very first
553 # Convert to UTF-8 encoded bytestrings as the very first
563 # thing: calling any method on a localstr object will turn it
554 # thing: calling any method on a localstr object will turn it
564 # into a str object and the cached UTF-8 string is thus lost.
555 # into a str object and the cached UTF-8 string is thus lost.
565 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
556 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
566
557
567 user = user.strip()
558 user = user.strip()
568 # An empty username or a username with a "\n" will make the
559 # An empty username or a username with a "\n" will make the
569 # revision text contain two "\n\n" sequences -> corrupt
560 # revision text contain two "\n\n" sequences -> corrupt
570 # repository since read cannot unpack the revision.
561 # repository since read cannot unpack the revision.
571 if not user:
562 if not user:
572 raise error.StorageError(_(b"empty username"))
563 raise error.StorageError(_(b"empty username"))
573 if b"\n" in user:
564 if b"\n" in user:
574 raise error.StorageError(
565 raise error.StorageError(
575 _(b"username %r contains a newline") % pycompat.bytestr(user)
566 _(b"username %r contains a newline") % pycompat.bytestr(user)
576 )
567 )
577
568
578 desc = stripdesc(desc)
569 desc = stripdesc(desc)
579
570
580 if date:
571 if date:
581 parseddate = b"%d %d" % dateutil.parsedate(date)
572 parseddate = b"%d %d" % dateutil.parsedate(date)
582 else:
573 else:
583 parseddate = b"%d %d" % dateutil.makedate()
574 parseddate = b"%d %d" % dateutil.makedate()
584 if extra:
575 if extra:
585 branch = extra.get(b"branch")
576 branch = extra.get(b"branch")
586 if branch in (b"default", b""):
577 if branch in (b"default", b""):
587 del extra[b"branch"]
578 del extra[b"branch"]
588 elif branch in (b".", b"null", b"tip"):
579 elif branch in (b".", b"null", b"tip"):
589 raise error.StorageError(
580 raise error.StorageError(
590 _(b'the name \'%s\' is reserved') % branch
581 _(b'the name \'%s\' is reserved') % branch
591 )
582 )
592 sortedfiles = sorted(files)
583 sortedfiles = sorted(files)
593 sidedata = None
584 sidedata = None
594 if extra is not None:
585 if extra is not None:
595 for name in (
586 for name in (
596 b'p1copies',
587 b'p1copies',
597 b'p2copies',
588 b'p2copies',
598 b'filesadded',
589 b'filesadded',
599 b'filesremoved',
590 b'filesremoved',
600 ):
591 ):
601 extra.pop(name, None)
592 extra.pop(name, None)
602 if p1copies is not None:
593 if p1copies is not None:
603 p1copies = copies.encodecopies(sortedfiles, p1copies)
594 p1copies = copies.encodecopies(sortedfiles, p1copies)
604 if p2copies is not None:
595 if p2copies is not None:
605 p2copies = copies.encodecopies(sortedfiles, p2copies)
596 p2copies = copies.encodecopies(sortedfiles, p2copies)
606 if filesadded is not None:
597 if filesadded is not None:
607 filesadded = copies.encodefileindices(sortedfiles, filesadded)
598 filesadded = copies.encodefileindices(sortedfiles, filesadded)
608 if filesremoved is not None:
599 if filesremoved is not None:
609 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
600 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
610 if self._copiesstorage == b'extra':
601 if self._copiesstorage == b'extra':
611 extrasentries = p1copies, p2copies, filesadded, filesremoved
602 extrasentries = p1copies, p2copies, filesadded, filesremoved
612 if extra is None and any(x is not None for x in extrasentries):
603 if extra is None and any(x is not None for x in extrasentries):
613 extra = {}
604 extra = {}
614 if p1copies is not None:
605 if p1copies is not None:
615 extra[b'p1copies'] = p1copies
606 extra[b'p1copies'] = p1copies
616 if p2copies is not None:
607 if p2copies is not None:
617 extra[b'p2copies'] = p2copies
608 extra[b'p2copies'] = p2copies
618 if filesadded is not None:
609 if filesadded is not None:
619 extra[b'filesadded'] = filesadded
610 extra[b'filesadded'] = filesadded
620 if filesremoved is not None:
611 if filesremoved is not None:
621 extra[b'filesremoved'] = filesremoved
612 extra[b'filesremoved'] = filesremoved
622 elif self._copiesstorage == b'changeset-sidedata':
613 elif self._copiesstorage == b'changeset-sidedata':
623 sidedata = {}
614 sidedata = {}
624 if p1copies:
615 if p1copies:
625 sidedata[sidedatamod.SD_P1COPIES] = p1copies
616 sidedata[sidedatamod.SD_P1COPIES] = p1copies
626 if p2copies:
617 if p2copies:
627 sidedata[sidedatamod.SD_P2COPIES] = p2copies
618 sidedata[sidedatamod.SD_P2COPIES] = p2copies
628 if filesadded:
619 if filesadded:
629 sidedata[sidedatamod.SD_FILESADDED] = filesadded
620 sidedata[sidedatamod.SD_FILESADDED] = filesadded
630 if filesremoved:
621 if filesremoved:
631 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
622 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
632 if not sidedata:
623 if not sidedata:
633 sidedata = None
624 sidedata = None
634
625
635 if extra:
626 if extra:
636 extra = encodeextra(extra)
627 extra = encodeextra(extra)
637 parseddate = b"%s %s" % (parseddate, extra)
628 parseddate = b"%s %s" % (parseddate, extra)
638 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
629 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
639 text = b"\n".join(l)
630 text = b"\n".join(l)
640 return self.addrevision(
631 return self.addrevision(
641 text, transaction, len(self), p1, p2, sidedata=sidedata
632 text, transaction, len(self), p1, p2, sidedata=sidedata
642 )
633 )
643
634
644 def branchinfo(self, rev):
635 def branchinfo(self, rev):
645 """return the branch name and open/close state of a revision
636 """return the branch name and open/close state of a revision
646
637
647 This function exists because creating a changectx object
638 This function exists because creating a changectx object
648 just to access this is costly."""
639 just to access this is costly."""
649 extra = self.read(rev)[5]
640 extra = self.read(rev)[5]
650 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
641 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
651
642
652 def _nodeduplicatecallback(self, transaction, node):
643 def _nodeduplicatecallback(self, transaction, node):
653 # keep track of revisions that got "re-added", eg: unbunde of know rev.
644 # keep track of revisions that got "re-added", eg: unbunde of know rev.
654 #
645 #
655 # We track them in a list to preserve their order from the source bundle
646 # We track them in a list to preserve their order from the source bundle
656 duplicates = transaction.changes.setdefault(b'revduplicates', [])
647 duplicates = transaction.changes.setdefault(b'revduplicates', [])
657 duplicates.append(self.rev(node))
648 duplicates.append(self.rev(node))
@@ -1,417 +1,430 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .i18n import _
15 from .node import (
16 hex,
17 nullrev,
18 )
15 from .pycompat import (
19 from .pycompat import (
16 delattr,
20 delattr,
17 getattr,
21 getattr,
18 setattr,
22 setattr,
19 )
23 )
20 from . import (
24 from . import (
21 error,
25 error,
22 obsolete,
26 obsolete,
23 phases,
27 phases,
24 pycompat,
28 pycompat,
25 revlog,
29 revlog,
26 tags as tagsmod,
30 tags as tagsmod,
27 util,
31 util,
28 )
32 )
29 from .utils import repoviewutil
33 from .utils import repoviewutil
30
34
31
35
32 def hideablerevs(repo):
36 def hideablerevs(repo):
33 """Revision candidates to be hidden
37 """Revision candidates to be hidden
34
38
35 This is a standalone function to allow extensions to wrap it.
39 This is a standalone function to allow extensions to wrap it.
36
40
37 Because we use the set of immutable changesets as a fallback subset in
41 Because we use the set of immutable changesets as a fallback subset in
38 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
42 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
39 "public" changesets as "hideable". Doing so would break multiple code
43 "public" changesets as "hideable". Doing so would break multiple code
40 assertions and lead to crashes."""
44 assertions and lead to crashes."""
41 obsoletes = obsolete.getrevs(repo, b'obsolete')
45 obsoletes = obsolete.getrevs(repo, b'obsolete')
42 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
46 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
43 internals = frozenset(internals)
47 internals = frozenset(internals)
44 return obsoletes | internals
48 return obsoletes | internals
45
49
46
50
47 def pinnedrevs(repo):
51 def pinnedrevs(repo):
48 """revisions blocking hidden changesets from being filtered
52 """revisions blocking hidden changesets from being filtered
49 """
53 """
50
54
51 cl = repo.changelog
55 cl = repo.changelog
52 pinned = set()
56 pinned = set()
53 pinned.update([par.rev() for par in repo[None].parents()])
57 pinned.update([par.rev() for par in repo[None].parents()])
54 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
58 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
55
59
56 tags = {}
60 tags = {}
57 tagsmod.readlocaltags(repo.ui, repo, tags, {})
61 tagsmod.readlocaltags(repo.ui, repo, tags, {})
58 if tags:
62 if tags:
59 rev, nodemap = cl.rev, cl.nodemap
63 rev, nodemap = cl.rev, cl.nodemap
60 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
64 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
61 return pinned
65 return pinned
62
66
63
67
64 def _revealancestors(pfunc, hidden, revs):
68 def _revealancestors(pfunc, hidden, revs):
65 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
69 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
66 from 'hidden'
70 from 'hidden'
67
71
68 - pfunc(r): a funtion returning parent of 'r',
72 - pfunc(r): a funtion returning parent of 'r',
69 - hidden: the (preliminary) hidden revisions, to be updated
73 - hidden: the (preliminary) hidden revisions, to be updated
70 - revs: iterable of revnum,
74 - revs: iterable of revnum,
71
75
72 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
76 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
73 *not* revealed)
77 *not* revealed)
74 """
78 """
75 stack = list(revs)
79 stack = list(revs)
76 while stack:
80 while stack:
77 for p in pfunc(stack.pop()):
81 for p in pfunc(stack.pop()):
78 if p != nullrev and p in hidden:
82 if p != nullrev and p in hidden:
79 hidden.remove(p)
83 hidden.remove(p)
80 stack.append(p)
84 stack.append(p)
81
85
82
86
83 def computehidden(repo, visibilityexceptions=None):
87 def computehidden(repo, visibilityexceptions=None):
84 """compute the set of hidden revision to filter
88 """compute the set of hidden revision to filter
85
89
86 During most operation hidden should be filtered."""
90 During most operation hidden should be filtered."""
87 assert not repo.changelog.filteredrevs
91 assert not repo.changelog.filteredrevs
88
92
89 hidden = hideablerevs(repo)
93 hidden = hideablerevs(repo)
90 if hidden:
94 if hidden:
91 hidden = set(hidden - pinnedrevs(repo))
95 hidden = set(hidden - pinnedrevs(repo))
92 if visibilityexceptions:
96 if visibilityexceptions:
93 hidden -= visibilityexceptions
97 hidden -= visibilityexceptions
94 pfunc = repo.changelog.parentrevs
98 pfunc = repo.changelog.parentrevs
95 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
99 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
96
100
97 visible = mutable - hidden
101 visible = mutable - hidden
98 _revealancestors(pfunc, hidden, visible)
102 _revealancestors(pfunc, hidden, visible)
99 return frozenset(hidden)
103 return frozenset(hidden)
100
104
101
105
102 def computesecret(repo, visibilityexceptions=None):
106 def computesecret(repo, visibilityexceptions=None):
103 """compute the set of revision that can never be exposed through hgweb
107 """compute the set of revision that can never be exposed through hgweb
104
108
105 Changeset in the secret phase (or above) should stay unaccessible."""
109 Changeset in the secret phase (or above) should stay unaccessible."""
106 assert not repo.changelog.filteredrevs
110 assert not repo.changelog.filteredrevs
107 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
111 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
108 return frozenset(secrets)
112 return frozenset(secrets)
109
113
110
114
111 def computeunserved(repo, visibilityexceptions=None):
115 def computeunserved(repo, visibilityexceptions=None):
112 """compute the set of revision that should be filtered when used a server
116 """compute the set of revision that should be filtered when used a server
113
117
114 Secret and hidden changeset should not pretend to be here."""
118 Secret and hidden changeset should not pretend to be here."""
115 assert not repo.changelog.filteredrevs
119 assert not repo.changelog.filteredrevs
116 # fast path in simple case to avoid impact of non optimised code
120 # fast path in simple case to avoid impact of non optimised code
117 hiddens = filterrevs(repo, b'visible')
121 hiddens = filterrevs(repo, b'visible')
118 secrets = filterrevs(repo, b'served.hidden')
122 secrets = filterrevs(repo, b'served.hidden')
119 if secrets:
123 if secrets:
120 return frozenset(hiddens | secrets)
124 return frozenset(hiddens | secrets)
121 else:
125 else:
122 return hiddens
126 return hiddens
123
127
124
128
125 def computemutable(repo, visibilityexceptions=None):
129 def computemutable(repo, visibilityexceptions=None):
126 assert not repo.changelog.filteredrevs
130 assert not repo.changelog.filteredrevs
127 # fast check to avoid revset call on huge repo
131 # fast check to avoid revset call on huge repo
128 if any(repo._phasecache.phaseroots[1:]):
132 if any(repo._phasecache.phaseroots[1:]):
129 getphase = repo._phasecache.phase
133 getphase = repo._phasecache.phase
130 maymutable = filterrevs(repo, b'base')
134 maymutable = filterrevs(repo, b'base')
131 return frozenset(r for r in maymutable if getphase(repo, r))
135 return frozenset(r for r in maymutable if getphase(repo, r))
132 return frozenset()
136 return frozenset()
133
137
134
138
135 def computeimpactable(repo, visibilityexceptions=None):
139 def computeimpactable(repo, visibilityexceptions=None):
136 """Everything impactable by mutable revision
140 """Everything impactable by mutable revision
137
141
138 The immutable filter still have some chance to get invalidated. This will
142 The immutable filter still have some chance to get invalidated. This will
139 happen when:
143 happen when:
140
144
141 - you garbage collect hidden changeset,
145 - you garbage collect hidden changeset,
142 - public phase is moved backward,
146 - public phase is moved backward,
143 - something is changed in the filtering (this could be fixed)
147 - something is changed in the filtering (this could be fixed)
144
148
145 This filter out any mutable changeset and any public changeset that may be
149 This filter out any mutable changeset and any public changeset that may be
146 impacted by something happening to a mutable revision.
150 impacted by something happening to a mutable revision.
147
151
148 This is achieved by filtered everything with a revision number egal or
152 This is achieved by filtered everything with a revision number egal or
149 higher than the first mutable changeset is filtered."""
153 higher than the first mutable changeset is filtered."""
150 assert not repo.changelog.filteredrevs
154 assert not repo.changelog.filteredrevs
151 cl = repo.changelog
155 cl = repo.changelog
152 firstmutable = len(cl)
156 firstmutable = len(cl)
153 for roots in repo._phasecache.phaseroots[1:]:
157 for roots in repo._phasecache.phaseroots[1:]:
154 if roots:
158 if roots:
155 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
159 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
156 # protect from nullrev root
160 # protect from nullrev root
157 firstmutable = max(0, firstmutable)
161 firstmutable = max(0, firstmutable)
158 return frozenset(pycompat.xrange(firstmutable, len(cl)))
162 return frozenset(pycompat.xrange(firstmutable, len(cl)))
159
163
160
164
161 # function to compute filtered set
165 # function to compute filtered set
162 #
166 #
163 # When adding a new filter you MUST update the table at:
167 # When adding a new filter you MUST update the table at:
164 # mercurial.utils.repoviewutil.subsettable
168 # mercurial.utils.repoviewutil.subsettable
165 # Otherwise your filter will have to recompute all its branches cache
169 # Otherwise your filter will have to recompute all its branches cache
166 # from scratch (very slow).
170 # from scratch (very slow).
167 filtertable = {
171 filtertable = {
168 b'visible': computehidden,
172 b'visible': computehidden,
169 b'visible-hidden': computehidden,
173 b'visible-hidden': computehidden,
170 b'served.hidden': computesecret,
174 b'served.hidden': computesecret,
171 b'served': computeunserved,
175 b'served': computeunserved,
172 b'immutable': computemutable,
176 b'immutable': computemutable,
173 b'base': computeimpactable,
177 b'base': computeimpactable,
174 }
178 }
175
179
176 _basefiltername = list(filtertable)
180 _basefiltername = list(filtertable)
177
181
178
182
179 def extrafilter(ui):
183 def extrafilter(ui):
180 """initialize extra filter and return its id
184 """initialize extra filter and return its id
181
185
182 If extra filtering is configured, we make sure the associated filtered view
186 If extra filtering is configured, we make sure the associated filtered view
183 are declared and return the associated id.
187 are declared and return the associated id.
184 """
188 """
185 frevs = ui.config(b'experimental', b'extra-filter-revs')
189 frevs = ui.config(b'experimental', b'extra-filter-revs')
186 if frevs is None:
190 if frevs is None:
187 return None
191 return None
188
192
189 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
193 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
190
194
191 combine = lambda fname: fname + b'%' + fid
195 combine = lambda fname: fname + b'%' + fid
192
196
193 subsettable = repoviewutil.subsettable
197 subsettable = repoviewutil.subsettable
194
198
195 if combine(b'base') not in filtertable:
199 if combine(b'base') not in filtertable:
196 for name in _basefiltername:
200 for name in _basefiltername:
197
201
198 def extrafilteredrevs(repo, *args, **kwargs):
202 def extrafilteredrevs(repo, *args, **kwargs):
199 baserevs = filtertable[name](repo, *args, **kwargs)
203 baserevs = filtertable[name](repo, *args, **kwargs)
200 extrarevs = frozenset(repo.revs(frevs))
204 extrarevs = frozenset(repo.revs(frevs))
201 return baserevs | extrarevs
205 return baserevs | extrarevs
202
206
203 filtertable[combine(name)] = extrafilteredrevs
207 filtertable[combine(name)] = extrafilteredrevs
204 if name in subsettable:
208 if name in subsettable:
205 subsettable[combine(name)] = combine(subsettable[name])
209 subsettable[combine(name)] = combine(subsettable[name])
206 return fid
210 return fid
207
211
208
212
209 def filterrevs(repo, filtername, visibilityexceptions=None):
213 def filterrevs(repo, filtername, visibilityexceptions=None):
210 """returns set of filtered revision for this filter name
214 """returns set of filtered revision for this filter name
211
215
212 visibilityexceptions is a set of revs which must are exceptions for
216 visibilityexceptions is a set of revs which must are exceptions for
213 hidden-state and must be visible. They are dynamic and hence we should not
217 hidden-state and must be visible. They are dynamic and hence we should not
214 cache it's result"""
218 cache it's result"""
215 if filtername not in repo.filteredrevcache:
219 if filtername not in repo.filteredrevcache:
216 func = filtertable[filtername]
220 func = filtertable[filtername]
217 if visibilityexceptions:
221 if visibilityexceptions:
218 return func(repo.unfiltered, visibilityexceptions)
222 return func(repo.unfiltered, visibilityexceptions)
219 repo.filteredrevcache[filtername] = func(repo.unfiltered())
223 repo.filteredrevcache[filtername] = func(repo.unfiltered())
220 return repo.filteredrevcache[filtername]
224 return repo.filteredrevcache[filtername]
221
225
222
226
223 def wrapchangelog(unfichangelog, filteredrevs):
227 def wrapchangelog(unfichangelog, filteredrevs):
224 cl = copy.copy(unfichangelog)
228 cl = copy.copy(unfichangelog)
225 cl.filteredrevs = filteredrevs
229 cl.filteredrevs = filteredrevs
226
230
227 class filteredchangelog(cl.__class__):
231 class filteredchangelog(cl.__class__):
228 def tiprev(self):
232 def tiprev(self):
229 """filtered version of revlog.tiprev"""
233 """filtered version of revlog.tiprev"""
230 for i in pycompat.xrange(len(self) - 1, -2, -1):
234 for i in pycompat.xrange(len(self) - 1, -2, -1):
231 if i not in self.filteredrevs:
235 if i not in self.filteredrevs:
232 return i
236 return i
233
237
234 def __contains__(self, rev):
238 def __contains__(self, rev):
235 """filtered version of revlog.__contains__"""
239 """filtered version of revlog.__contains__"""
236 return 0 <= rev < len(self) and rev not in self.filteredrevs
240 return 0 <= rev < len(self) and rev not in self.filteredrevs
237
241
238 def __iter__(self):
242 def __iter__(self):
239 """filtered version of revlog.__iter__"""
243 """filtered version of revlog.__iter__"""
240 if len(self.filteredrevs) == 0:
244 if len(self.filteredrevs) == 0:
241 return revlog.revlog.__iter__(self)
245 return revlog.revlog.__iter__(self)
242
246
243
247
244 def filterediter():
248 def filterediter():
245 for i in pycompat.xrange(len(self)):
249 for i in pycompat.xrange(len(self)):
246 if i not in self.filteredrevs:
250 if i not in self.filteredrevs:
247 yield i
251 yield i
248
252
249 return filterediter()
253 return filterediter()
250
254
251 def revs(self, start=0, stop=None):
255 def revs(self, start=0, stop=None):
252 """filtered version of revlog.revs"""
256 """filtered version of revlog.revs"""
253 for i in super(filteredchangelog, self).revs(start, stop):
257 for i in super(filteredchangelog, self).revs(start, stop):
254 if i not in self.filteredrevs:
258 if i not in self.filteredrevs:
255 yield i
259 yield i
256
260
257 def _checknofilteredinrevs(self, revs):
261 def _checknofilteredinrevs(self, revs):
258 """raise the appropriate error if 'revs' contains a filtered revision
262 """raise the appropriate error if 'revs' contains a filtered revision
259
263
260 This returns a version of 'revs' to be used thereafter by the caller.
264 This returns a version of 'revs' to be used thereafter by the caller.
261 In particular, if revs is an iterator, it is converted into a set.
265 In particular, if revs is an iterator, it is converted into a set.
262 """
266 """
263 safehasattr = util.safehasattr
267 safehasattr = util.safehasattr
264 if safehasattr(revs, '__next__'):
268 if safehasattr(revs, '__next__'):
265 # Note that inspect.isgenerator() is not true for iterators,
269 # Note that inspect.isgenerator() is not true for iterators,
266 revs = set(revs)
270 revs = set(revs)
267
271
268 filteredrevs = self.filteredrevs
272 filteredrevs = self.filteredrevs
269 if safehasattr(revs, 'first'): # smartset
273 if safehasattr(revs, 'first'): # smartset
270 offenders = revs & filteredrevs
274 offenders = revs & filteredrevs
271 else:
275 else:
272 offenders = filteredrevs.intersection(revs)
276 offenders = filteredrevs.intersection(revs)
273
277
274 for rev in offenders:
278 for rev in offenders:
275 raise error.FilteredIndexError(rev)
279 raise error.FilteredIndexError(rev)
276 return revs
280 return revs
277
281
278 def headrevs(self, revs=None):
282 def headrevs(self, revs=None):
279 if revs is None and self.filteredrevs:
283 if revs is None and self.filteredrevs:
280 try:
284 try:
281 return self.index.headrevsfiltered(self.filteredrevs)
285 return self.index.headrevsfiltered(self.filteredrevs)
282 # AttributeError covers non-c-extension environments and
286 # AttributeError covers non-c-extension environments and
283 # old c extensions without filter handling.
287 # old c extensions without filter handling.
284 except AttributeError:
288 except AttributeError:
285 return self._headrevs()
289 return self._headrevs()
286
290
287 if self.filteredrevs:
291 if self.filteredrevs:
288 revs = self._checknofilteredinrevs(revs)
292 revs = self._checknofilteredinrevs(revs)
289 return super(filteredchangelog, self).headrevs(revs)
293 return super(filteredchangelog, self).headrevs(revs)
290
294
291 def strip(self, *args, **kwargs):
295 def strip(self, *args, **kwargs):
292 # XXX make something better than assert
296 # XXX make something better than assert
293 # We can't expect proper strip behavior if we are filtered.
297 # We can't expect proper strip behavior if we are filtered.
294 assert not self.filteredrevs
298 assert not self.filteredrevs
295 super(filteredchangelog, self).strip(*args, **kwargs)
299 super(filteredchangelog, self).strip(*args, **kwargs)
296
300
301 def rev(self, node):
302 """filtered version of revlog.rev"""
303 r = super(filteredchangelog, self).rev(node)
304 if r in self.filteredrevs:
305 raise error.FilteredLookupError(
306 hex(node), self.indexfile, _(b'filtered node')
307 )
308 return r
309
297 cl.__class__ = filteredchangelog
310 cl.__class__ = filteredchangelog
298
311
299 return cl
312 return cl
300
313
301
314
302 class repoview(object):
315 class repoview(object):
303 """Provide a read/write view of a repo through a filtered changelog
316 """Provide a read/write view of a repo through a filtered changelog
304
317
305 This object is used to access a filtered version of a repository without
318 This object is used to access a filtered version of a repository without
306 altering the original repository object itself. We can not alter the
319 altering the original repository object itself. We can not alter the
307 original object for two main reasons:
320 original object for two main reasons:
308 - It prevents the use of a repo with multiple filters at the same time. In
321 - It prevents the use of a repo with multiple filters at the same time. In
309 particular when multiple threads are involved.
322 particular when multiple threads are involved.
310 - It makes scope of the filtering harder to control.
323 - It makes scope of the filtering harder to control.
311
324
312 This object behaves very closely to the original repository. All attribute
325 This object behaves very closely to the original repository. All attribute
313 operations are done on the original repository:
326 operations are done on the original repository:
314 - An access to `repoview.someattr` actually returns `repo.someattr`,
327 - An access to `repoview.someattr` actually returns `repo.someattr`,
315 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
328 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
316 - A deletion of `repoview.someattr` actually drops `someattr`
329 - A deletion of `repoview.someattr` actually drops `someattr`
317 from `repo.__dict__`.
330 from `repo.__dict__`.
318
331
319 The only exception is the `changelog` property. It is overridden to return
332 The only exception is the `changelog` property. It is overridden to return
320 a (surface) copy of `repo.changelog` with some revisions filtered. The
333 a (surface) copy of `repo.changelog` with some revisions filtered. The
321 `filtername` attribute of the view control the revisions that need to be
334 `filtername` attribute of the view control the revisions that need to be
322 filtered. (the fact the changelog is copied is an implementation detail).
335 filtered. (the fact the changelog is copied is an implementation detail).
323
336
324 Unlike attributes, this object intercepts all method calls. This means that
337 Unlike attributes, this object intercepts all method calls. This means that
325 all methods are run on the `repoview` object with the filtered `changelog`
338 all methods are run on the `repoview` object with the filtered `changelog`
326 property. For this purpose the simple `repoview` class must be mixed with
339 property. For this purpose the simple `repoview` class must be mixed with
327 the actual class of the repository. This ensures that the resulting
340 the actual class of the repository. This ensures that the resulting
328 `repoview` object have the very same methods than the repo object. This
341 `repoview` object have the very same methods than the repo object. This
329 leads to the property below.
342 leads to the property below.
330
343
331 repoview.method() --> repo.__class__.method(repoview)
344 repoview.method() --> repo.__class__.method(repoview)
332
345
333 The inheritance has to be done dynamically because `repo` can be of any
346 The inheritance has to be done dynamically because `repo` can be of any
334 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
347 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
335 """
348 """
336
349
337 def __init__(self, repo, filtername, visibilityexceptions=None):
350 def __init__(self, repo, filtername, visibilityexceptions=None):
338 object.__setattr__(self, r'_unfilteredrepo', repo)
351 object.__setattr__(self, r'_unfilteredrepo', repo)
339 object.__setattr__(self, r'filtername', filtername)
352 object.__setattr__(self, r'filtername', filtername)
340 object.__setattr__(self, r'_clcachekey', None)
353 object.__setattr__(self, r'_clcachekey', None)
341 object.__setattr__(self, r'_clcache', None)
354 object.__setattr__(self, r'_clcache', None)
342 # revs which are exceptions and must not be hidden
355 # revs which are exceptions and must not be hidden
343 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
356 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
344
357
345 # not a propertycache on purpose we shall implement a proper cache later
358 # not a propertycache on purpose we shall implement a proper cache later
346 @property
359 @property
347 def changelog(self):
360 def changelog(self):
348 """return a filtered version of the changeset
361 """return a filtered version of the changeset
349
362
350 this changelog must not be used for writing"""
363 this changelog must not be used for writing"""
351 # some cache may be implemented later
364 # some cache may be implemented later
352 unfi = self._unfilteredrepo
365 unfi = self._unfilteredrepo
353 unfichangelog = unfi.changelog
366 unfichangelog = unfi.changelog
354 # bypass call to changelog.method
367 # bypass call to changelog.method
355 unfiindex = unfichangelog.index
368 unfiindex = unfichangelog.index
356 unfilen = len(unfiindex)
369 unfilen = len(unfiindex)
357 unfinode = unfiindex[unfilen - 1][7]
370 unfinode = unfiindex[unfilen - 1][7]
358 with util.timedcm('repo filter for %s', self.filtername):
371 with util.timedcm('repo filter for %s', self.filtername):
359 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
372 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
360 cl = self._clcache
373 cl = self._clcache
361 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
374 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
362 # if cl.index is not unfiindex, unfi.changelog would be
375 # if cl.index is not unfiindex, unfi.changelog would be
363 # recreated, and our clcache refers to garbage object
376 # recreated, and our clcache refers to garbage object
364 if cl is not None and (
377 if cl is not None and (
365 cl.index is not unfiindex or newkey != self._clcachekey
378 cl.index is not unfiindex or newkey != self._clcachekey
366 ):
379 ):
367 cl = None
380 cl = None
368 # could have been made None by the previous if
381 # could have been made None by the previous if
369 if cl is None:
382 if cl is None:
370 cl = wrapchangelog(unfichangelog, revs)
383 cl = wrapchangelog(unfichangelog, revs)
371 object.__setattr__(self, r'_clcache', cl)
384 object.__setattr__(self, r'_clcache', cl)
372 object.__setattr__(self, r'_clcachekey', newkey)
385 object.__setattr__(self, r'_clcachekey', newkey)
373 return cl
386 return cl
374
387
375 def unfiltered(self):
388 def unfiltered(self):
376 """Return an unfiltered version of a repo"""
389 """Return an unfiltered version of a repo"""
377 return self._unfilteredrepo
390 return self._unfilteredrepo
378
391
def filtered(self, name, visibilityexceptions=None):
    """Give back a view of the repository filtered by ``name``.

    When ``name`` matches the current filter and no extra visibility
    exceptions are requested, this very proxy can be reused as-is.
    """
    reusable = name == self.filtername and not visibilityexceptions
    if reusable:
        return self
    # Delegate to the unfiltered repo so the new view wraps the real
    # repository instead of stacking proxies on top of each other.
    return self.unfiltered().filtered(name, visibilityexceptions)
384
397
def __repr__(self):
    """Debug representation: class name, filter name and wrapped repo."""
    cls_name = self.__class__.__name__
    filter_name = pycompat.sysstr(self.filtername)
    base_repo = self.unfiltered()
    return r'<%s:%s %r>' % (cls_name, filter_name, base_repo)
391
404
# Every attribute access is forwarded to the proxied (unfiltered) repo.
def __getattr__(self, attr):
    """Serve attribute reads not found on the proxy from the real repo."""
    target = self._unfilteredrepo
    return getattr(target, attr)
395
408
def __setattr__(self, attr, value):
    """Forward attribute writes to the underlying repository.

    Writing through keeps the proxy itself stateless; code that must
    store state on the proxy uses object.__setattr__ to bypass this.
    """
    target = self._unfilteredrepo
    return setattr(target, attr, value)
398
411
def __delattr__(self, attr):
    """Forward attribute deletion to the underlying repository."""
    target = self._unfilteredrepo
    return delattr(target, attr)
401
414
402
415
# Python <3.4 easily leaks types via __mro__. See
# https://bugs.python.org/issue17950. We cache dynamically created types
# so they won't be leaked on every invocation of repo.filtered().
_filteredrepotypes = weakref.WeakKeyDictionary()


def newtype(base):
    """Create a new type with the repoview mixin and the given base class"""
    try:
        return _filteredrepotypes[base]
    except KeyError:
        # First time this base class is seen: build the mixed-in
        # subclass and remember it (weakly, so an otherwise-dead base
        # class can still be garbage collected).
        class filteredrepo(repoview, base):
            pass

        _filteredrepotypes[base] = filteredrepo
        return filteredrepo
General Comments 0
You need to be logged in to leave comments. Login now