##// END OF EJS Templates
repoview: move changelog.strip() override to filteredchangelog...
Martin von Zweigbergk -
r43753:c31fa28d default
parent child Browse files
Show More
@@ -1,663 +1,657 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
# Metadata key/value pairs implied on every changeset unless the on-disk
# entry overrides them (the branch name defaults to b'default').
_defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse the transformation applied by ``_string_escape()``."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag every
        # escaped backslash with a marker newline, substitute the real
        # NUL bytes, then drop the markers again.  Real newlines are
        # still escaped as ``\n`` at this point, so none are lost.
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    # remaining escapes (\\, \n, \r) are handled by the generic unescaper
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # Start from the implied defaults and overlay whatever the entry
    # actually stores; fields are NUL-separated "key:value" pairs.
    extra = _defaultextra.copy()
    for field in text.split(b'\0'):
        if not field:
            continue
        key, value = _string_unescape(field).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Serialize an extra dict into the changelog's NUL-separated format.

    Keys are emitted in sorted order so that equal mappings always yield
    byte-identical (and therefore hash-stable) changelog entries.
    """
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d)
    )
90
90
91
91
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
95
95
96
96
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # every write() lands in ``buf``; the underlying file is read-only
        # as far as this wrapper is concerned
        self.data = buf
        fh = vfs(name, mode)
        self.fp = fh
        self.offset = fh.tell()
        self.size = vfs.fstat(fh).st_size
        self._end = self.size

    def end(self):
        # virtual end: on-disk size plus everything buffered so far
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # buffering is the whole point; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        if self.offset < self.size:
            # only positions inside the real file need an actual seek
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        out = b""
        if self.offset < self.size:
            chunk = self.fp.read(count)
            out = chunk
            self.offset += len(chunk)
            if count > 0:
                count -= len(chunk)
        if count != 0:
            # collapse the buffered chunks into a single string so the
            # remaining span can be sliced out of it
            dataoff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            chunk = self.data[0][dataoff : dataoff + count]
            self.offset += len(chunk)
            out += chunk
        return out

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
165
165
166
166
167 def _divertopener(opener, target):
167 def _divertopener(opener, target):
168 """build an opener that writes in 'target.a' instead of 'target'"""
168 """build an opener that writes in 'target.a' instead of 'target'"""
169
169
170 def _divert(name, mode=b'r', checkambig=False):
170 def _divert(name, mode=b'r', checkambig=False):
171 if name != target:
171 if name != target:
172 return opener(name, mode)
172 return opener(name, mode)
173 return opener(name + b".a", mode)
173 return opener(name + b".a", mode)
174
174
175 return _divert
175 return _divert
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False):
181 def _delay(name, mode=b'r', checkambig=False):
182 if name != target:
182 if name != target:
183 return opener(name, mode)
183 return opener(name, mode)
184 return appender(opener, name, mode, buf)
184 return appender(opener, name, mode, buf)
185
185
186 return _delay
186 return _delay
187
187
188
188
@attr.s
class _changelogrevision(object):
    # Plain data holder mirroring the attributes that ``changelogrevision``
    # exposes as properties; returned by changelogrevision.__new__ for the
    # null/empty revision, where there is nothing to parse.
    #
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
203
203
204
204
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    # Parsing records only the separator offsets; each field is sliced
    # out of the raw text lazily by the properties below.
    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # null/empty revision: return a plain attrs object with
            # default values instead of parsing anything
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        # whether copy metadata lives in sidedata rather than in extra
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # first line is the manifest node in hex
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # the "time tz" part of the third line; trailing extra is dropped
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # encoded extra is the optional third field of the date line
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            # no extra stored -> shared default mapping
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        if off[2] == off[3]:
            # empty files section: off[2] is the first \n of the \n\n pair
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        # copy metadata comes from sidedata when cpsd, otherwise from extra
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # +2 skips the '\n\n' separator itself
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
361
361
362
362
363 class changelog(revlog.revlog):
363 class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state driving delayupdate()/_writepending()/_finalize(), which
        # keep index writes invisible to other readers until the
        # transaction closes
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden by the current repoview filter (empty = unfiltered)
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
406
406
407 def strip(self, *args, **kwargs):
408 # XXX make something better than assert
409 # We can't expect proper strip behavior if we are filtered.
410 assert not self.filteredrevs
411 super(changelog, self).strip(*args, **kwargs)
412
    def rev(self, node):
        """filtered version of revlog.rev"""
        r = super(changelog, self).rev(node)
        if r in self.filteredrevs:
            # pretend the node does not exist in this repoview
            raise error.FilteredLookupError(
                hex(node), self.indexfile, _(b'filtered node')
            )
        return r
421
415
    def node(self, rev):
        """filtered version of revlog.node"""
        if rev in self.filteredrevs:
            # hidden revisions must not leak through this repoview
            raise error.FilteredIndexError(rev)
        return super(changelog, self).node(rev)
427
421
    def linkrev(self, rev):
        """filtered version of revlog.linkrev"""
        if rev in self.filteredrevs:
            # hidden revisions must not leak through this repoview
            raise error.FilteredIndexError(rev)
        return super(changelog, self).linkrev(rev)
433
427
    def parentrevs(self, rev):
        """filtered version of revlog.parentrevs"""
        if rev in self.filteredrevs:
            # hidden revisions must not leak through this repoview
            raise error.FilteredIndexError(rev)
        return super(changelog, self).parentrevs(rev)
439
433
    def flags(self, rev):
        """filtered version of revlog.flags"""
        if rev in self.filteredrevs:
            # hidden revisions must not leak through this repoview
            raise error.FilteredIndexError(rev)
        return super(changelog, self).flags(rev)
445
439
    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # brand new changelog: divert all index writes into a
                # '.a' copy that _finalize() will rename into place
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    # stale pending file from an earlier run
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # existing changelog: buffer the appended index data in
                # memory until the transaction resolves
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        # expose pending data to hooks, then write for real at txn close
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
463
457
    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            # divert mode never buffers in memory at the same time
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            # NOTE(review): the open/close pair presumably ensures the
            # pending file exists before renaming — confirm
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered index entries to the real index file
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
        self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)
483
477
    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # the return value tells the transaction whether a pending file
        # exists for readers to pick up
        if self._divert:
            return True

        return False
507
501
    def _enforceinlinesize(self, tr, fp=None):
        # skip the inline->split conversion while index updates are being
        # delayed; _finalize() re-invokes this once writes are back in place
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)
511
505
    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        # _revisiondata returns (raw text, sidedata mapping)
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
533
527
    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        # the parser reads copy metadata from sidedata only when the repo
        # stores copies there
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )
540
534
541 def readfiles(self, node):
535 def readfiles(self, node):
542 """
536 """
543 short version of read that only returns the files modified by the cset
537 short version of read that only returns the files modified by the cset
544 """
538 """
545 text = self.revision(node)
539 text = self.revision(node)
546 if not text:
540 if not text:
547 return []
541 return []
548 last = text.index(b"\n\n")
542 last = text.index(b"\n\n")
549 l = text[:last].split(b'\n')
543 l = text[:last].split(b'\n')
550 return l[3:]
544 return l[3:]
551
545
552 def add(
546 def add(
553 self,
547 self,
554 manifest,
548 manifest,
555 files,
549 files,
556 desc,
550 desc,
557 transaction,
551 transaction,
558 p1,
552 p1,
559 p2,
553 p2,
560 user,
554 user,
561 date=None,
555 date=None,
562 extra=None,
556 extra=None,
563 p1copies=None,
557 p1copies=None,
564 p2copies=None,
558 p2copies=None,
565 filesadded=None,
559 filesadded=None,
566 filesremoved=None,
560 filesremoved=None,
567 ):
561 ):
568 # Convert to UTF-8 encoded bytestrings as the very first
562 # Convert to UTF-8 encoded bytestrings as the very first
569 # thing: calling any method on a localstr object will turn it
563 # thing: calling any method on a localstr object will turn it
570 # into a str object and the cached UTF-8 string is thus lost.
564 # into a str object and the cached UTF-8 string is thus lost.
571 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
565 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
572
566
573 user = user.strip()
567 user = user.strip()
574 # An empty username or a username with a "\n" will make the
568 # An empty username or a username with a "\n" will make the
575 # revision text contain two "\n\n" sequences -> corrupt
569 # revision text contain two "\n\n" sequences -> corrupt
576 # repository since read cannot unpack the revision.
570 # repository since read cannot unpack the revision.
577 if not user:
571 if not user:
578 raise error.StorageError(_(b"empty username"))
572 raise error.StorageError(_(b"empty username"))
579 if b"\n" in user:
573 if b"\n" in user:
580 raise error.StorageError(
574 raise error.StorageError(
581 _(b"username %r contains a newline") % pycompat.bytestr(user)
575 _(b"username %r contains a newline") % pycompat.bytestr(user)
582 )
576 )
583
577
584 desc = stripdesc(desc)
578 desc = stripdesc(desc)
585
579
586 if date:
580 if date:
587 parseddate = b"%d %d" % dateutil.parsedate(date)
581 parseddate = b"%d %d" % dateutil.parsedate(date)
588 else:
582 else:
589 parseddate = b"%d %d" % dateutil.makedate()
583 parseddate = b"%d %d" % dateutil.makedate()
590 if extra:
584 if extra:
591 branch = extra.get(b"branch")
585 branch = extra.get(b"branch")
592 if branch in (b"default", b""):
586 if branch in (b"default", b""):
593 del extra[b"branch"]
587 del extra[b"branch"]
594 elif branch in (b".", b"null", b"tip"):
588 elif branch in (b".", b"null", b"tip"):
595 raise error.StorageError(
589 raise error.StorageError(
596 _(b'the name \'%s\' is reserved') % branch
590 _(b'the name \'%s\' is reserved') % branch
597 )
591 )
598 sortedfiles = sorted(files)
592 sortedfiles = sorted(files)
599 sidedata = None
593 sidedata = None
600 if extra is not None:
594 if extra is not None:
601 for name in (
595 for name in (
602 b'p1copies',
596 b'p1copies',
603 b'p2copies',
597 b'p2copies',
604 b'filesadded',
598 b'filesadded',
605 b'filesremoved',
599 b'filesremoved',
606 ):
600 ):
607 extra.pop(name, None)
601 extra.pop(name, None)
608 if p1copies is not None:
602 if p1copies is not None:
609 p1copies = copies.encodecopies(sortedfiles, p1copies)
603 p1copies = copies.encodecopies(sortedfiles, p1copies)
610 if p2copies is not None:
604 if p2copies is not None:
611 p2copies = copies.encodecopies(sortedfiles, p2copies)
605 p2copies = copies.encodecopies(sortedfiles, p2copies)
612 if filesadded is not None:
606 if filesadded is not None:
613 filesadded = copies.encodefileindices(sortedfiles, filesadded)
607 filesadded = copies.encodefileindices(sortedfiles, filesadded)
614 if filesremoved is not None:
608 if filesremoved is not None:
615 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
609 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
616 if self._copiesstorage == b'extra':
610 if self._copiesstorage == b'extra':
617 extrasentries = p1copies, p2copies, filesadded, filesremoved
611 extrasentries = p1copies, p2copies, filesadded, filesremoved
618 if extra is None and any(x is not None for x in extrasentries):
612 if extra is None and any(x is not None for x in extrasentries):
619 extra = {}
613 extra = {}
620 if p1copies is not None:
614 if p1copies is not None:
621 extra[b'p1copies'] = p1copies
615 extra[b'p1copies'] = p1copies
622 if p2copies is not None:
616 if p2copies is not None:
623 extra[b'p2copies'] = p2copies
617 extra[b'p2copies'] = p2copies
624 if filesadded is not None:
618 if filesadded is not None:
625 extra[b'filesadded'] = filesadded
619 extra[b'filesadded'] = filesadded
626 if filesremoved is not None:
620 if filesremoved is not None:
627 extra[b'filesremoved'] = filesremoved
621 extra[b'filesremoved'] = filesremoved
628 elif self._copiesstorage == b'changeset-sidedata':
622 elif self._copiesstorage == b'changeset-sidedata':
629 sidedata = {}
623 sidedata = {}
630 if p1copies:
624 if p1copies:
631 sidedata[sidedatamod.SD_P1COPIES] = p1copies
625 sidedata[sidedatamod.SD_P1COPIES] = p1copies
632 if p2copies:
626 if p2copies:
633 sidedata[sidedatamod.SD_P2COPIES] = p2copies
627 sidedata[sidedatamod.SD_P2COPIES] = p2copies
634 if filesadded:
628 if filesadded:
635 sidedata[sidedatamod.SD_FILESADDED] = filesadded
629 sidedata[sidedatamod.SD_FILESADDED] = filesadded
636 if filesremoved:
630 if filesremoved:
637 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
631 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
638 if not sidedata:
632 if not sidedata:
639 sidedata = None
633 sidedata = None
640
634
641 if extra:
635 if extra:
642 extra = encodeextra(extra)
636 extra = encodeextra(extra)
643 parseddate = b"%s %s" % (parseddate, extra)
637 parseddate = b"%s %s" % (parseddate, extra)
644 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
638 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
645 text = b"\n".join(l)
639 text = b"\n".join(l)
646 return self.addrevision(
640 return self.addrevision(
647 text, transaction, len(self), p1, p2, sidedata=sidedata
641 text, transaction, len(self), p1, p2, sidedata=sidedata
648 )
642 )
649
643
650 def branchinfo(self, rev):
644 def branchinfo(self, rev):
651 """return the branch name and open/close state of a revision
645 """return the branch name and open/close state of a revision
652
646
653 This function exists because creating a changectx object
647 This function exists because creating a changectx object
654 just to access this is costly."""
648 just to access this is costly."""
655 extra = self.read(rev)[5]
649 extra = self.read(rev)[5]
656 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
650 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
657
651
658 def _nodeduplicatecallback(self, transaction, node):
652 def _nodeduplicatecallback(self, transaction, node):
659 # keep track of revisions that got "re-added", eg: unbunde of know rev.
653 # keep track of revisions that got "re-added", eg: unbunde of know rev.
660 #
654 #
661 # We track them in a list to preserve their order from the source bundle
655 # We track them in a list to preserve their order from the source bundle
662 duplicates = transaction.changes.setdefault(b'revduplicates', [])
656 duplicates = transaction.changes.setdefault(b'revduplicates', [])
663 duplicates.append(self.rev(node))
657 duplicates.append(self.rev(node))
@@ -1,411 +1,417 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .node import nullrev
15 from .pycompat import (
15 from .pycompat import (
16 delattr,
16 delattr,
17 getattr,
17 getattr,
18 setattr,
18 setattr,
19 )
19 )
20 from . import (
20 from . import (
21 error,
21 error,
22 obsolete,
22 obsolete,
23 phases,
23 phases,
24 pycompat,
24 pycompat,
25 revlog,
25 revlog,
26 tags as tagsmod,
26 tags as tagsmod,
27 util,
27 util,
28 )
28 )
29 from .utils import repoviewutil
29 from .utils import repoviewutil
30
30
31
31
32 def hideablerevs(repo):
32 def hideablerevs(repo):
33 """Revision candidates to be hidden
33 """Revision candidates to be hidden
34
34
35 This is a standalone function to allow extensions to wrap it.
35 This is a standalone function to allow extensions to wrap it.
36
36
37 Because we use the set of immutable changesets as a fallback subset in
37 Because we use the set of immutable changesets as a fallback subset in
38 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
38 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
39 "public" changesets as "hideable". Doing so would break multiple code
39 "public" changesets as "hideable". Doing so would break multiple code
40 assertions and lead to crashes."""
40 assertions and lead to crashes."""
41 obsoletes = obsolete.getrevs(repo, b'obsolete')
41 obsoletes = obsolete.getrevs(repo, b'obsolete')
42 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
42 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
43 internals = frozenset(internals)
43 internals = frozenset(internals)
44 return obsoletes | internals
44 return obsoletes | internals
45
45
46
46
47 def pinnedrevs(repo):
47 def pinnedrevs(repo):
48 """revisions blocking hidden changesets from being filtered
48 """revisions blocking hidden changesets from being filtered
49 """
49 """
50
50
51 cl = repo.changelog
51 cl = repo.changelog
52 pinned = set()
52 pinned = set()
53 pinned.update([par.rev() for par in repo[None].parents()])
53 pinned.update([par.rev() for par in repo[None].parents()])
54 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
54 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
55
55
56 tags = {}
56 tags = {}
57 tagsmod.readlocaltags(repo.ui, repo, tags, {})
57 tagsmod.readlocaltags(repo.ui, repo, tags, {})
58 if tags:
58 if tags:
59 rev, nodemap = cl.rev, cl.nodemap
59 rev, nodemap = cl.rev, cl.nodemap
60 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
60 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
61 return pinned
61 return pinned
62
62
63
63
64 def _revealancestors(pfunc, hidden, revs):
64 def _revealancestors(pfunc, hidden, revs):
65 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
65 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
66 from 'hidden'
66 from 'hidden'
67
67
68 - pfunc(r): a funtion returning parent of 'r',
68 - pfunc(r): a funtion returning parent of 'r',
69 - hidden: the (preliminary) hidden revisions, to be updated
69 - hidden: the (preliminary) hidden revisions, to be updated
70 - revs: iterable of revnum,
70 - revs: iterable of revnum,
71
71
72 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
72 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
73 *not* revealed)
73 *not* revealed)
74 """
74 """
75 stack = list(revs)
75 stack = list(revs)
76 while stack:
76 while stack:
77 for p in pfunc(stack.pop()):
77 for p in pfunc(stack.pop()):
78 if p != nullrev and p in hidden:
78 if p != nullrev and p in hidden:
79 hidden.remove(p)
79 hidden.remove(p)
80 stack.append(p)
80 stack.append(p)
81
81
82
82
83 def computehidden(repo, visibilityexceptions=None):
83 def computehidden(repo, visibilityexceptions=None):
84 """compute the set of hidden revision to filter
84 """compute the set of hidden revision to filter
85
85
86 During most operation hidden should be filtered."""
86 During most operation hidden should be filtered."""
87 assert not repo.changelog.filteredrevs
87 assert not repo.changelog.filteredrevs
88
88
89 hidden = hideablerevs(repo)
89 hidden = hideablerevs(repo)
90 if hidden:
90 if hidden:
91 hidden = set(hidden - pinnedrevs(repo))
91 hidden = set(hidden - pinnedrevs(repo))
92 if visibilityexceptions:
92 if visibilityexceptions:
93 hidden -= visibilityexceptions
93 hidden -= visibilityexceptions
94 pfunc = repo.changelog.parentrevs
94 pfunc = repo.changelog.parentrevs
95 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
95 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
96
96
97 visible = mutable - hidden
97 visible = mutable - hidden
98 _revealancestors(pfunc, hidden, visible)
98 _revealancestors(pfunc, hidden, visible)
99 return frozenset(hidden)
99 return frozenset(hidden)
100
100
101
101
102 def computesecret(repo, visibilityexceptions=None):
102 def computesecret(repo, visibilityexceptions=None):
103 """compute the set of revision that can never be exposed through hgweb
103 """compute the set of revision that can never be exposed through hgweb
104
104
105 Changeset in the secret phase (or above) should stay unaccessible."""
105 Changeset in the secret phase (or above) should stay unaccessible."""
106 assert not repo.changelog.filteredrevs
106 assert not repo.changelog.filteredrevs
107 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
107 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
108 return frozenset(secrets)
108 return frozenset(secrets)
109
109
110
110
111 def computeunserved(repo, visibilityexceptions=None):
111 def computeunserved(repo, visibilityexceptions=None):
112 """compute the set of revision that should be filtered when used a server
112 """compute the set of revision that should be filtered when used a server
113
113
114 Secret and hidden changeset should not pretend to be here."""
114 Secret and hidden changeset should not pretend to be here."""
115 assert not repo.changelog.filteredrevs
115 assert not repo.changelog.filteredrevs
116 # fast path in simple case to avoid impact of non optimised code
116 # fast path in simple case to avoid impact of non optimised code
117 hiddens = filterrevs(repo, b'visible')
117 hiddens = filterrevs(repo, b'visible')
118 secrets = filterrevs(repo, b'served.hidden')
118 secrets = filterrevs(repo, b'served.hidden')
119 if secrets:
119 if secrets:
120 return frozenset(hiddens | secrets)
120 return frozenset(hiddens | secrets)
121 else:
121 else:
122 return hiddens
122 return hiddens
123
123
124
124
125 def computemutable(repo, visibilityexceptions=None):
125 def computemutable(repo, visibilityexceptions=None):
126 assert not repo.changelog.filteredrevs
126 assert not repo.changelog.filteredrevs
127 # fast check to avoid revset call on huge repo
127 # fast check to avoid revset call on huge repo
128 if any(repo._phasecache.phaseroots[1:]):
128 if any(repo._phasecache.phaseroots[1:]):
129 getphase = repo._phasecache.phase
129 getphase = repo._phasecache.phase
130 maymutable = filterrevs(repo, b'base')
130 maymutable = filterrevs(repo, b'base')
131 return frozenset(r for r in maymutable if getphase(repo, r))
131 return frozenset(r for r in maymutable if getphase(repo, r))
132 return frozenset()
132 return frozenset()
133
133
134
134
135 def computeimpactable(repo, visibilityexceptions=None):
135 def computeimpactable(repo, visibilityexceptions=None):
136 """Everything impactable by mutable revision
136 """Everything impactable by mutable revision
137
137
138 The immutable filter still have some chance to get invalidated. This will
138 The immutable filter still have some chance to get invalidated. This will
139 happen when:
139 happen when:
140
140
141 - you garbage collect hidden changeset,
141 - you garbage collect hidden changeset,
142 - public phase is moved backward,
142 - public phase is moved backward,
143 - something is changed in the filtering (this could be fixed)
143 - something is changed in the filtering (this could be fixed)
144
144
145 This filter out any mutable changeset and any public changeset that may be
145 This filter out any mutable changeset and any public changeset that may be
146 impacted by something happening to a mutable revision.
146 impacted by something happening to a mutable revision.
147
147
148 This is achieved by filtered everything with a revision number egal or
148 This is achieved by filtered everything with a revision number egal or
149 higher than the first mutable changeset is filtered."""
149 higher than the first mutable changeset is filtered."""
150 assert not repo.changelog.filteredrevs
150 assert not repo.changelog.filteredrevs
151 cl = repo.changelog
151 cl = repo.changelog
152 firstmutable = len(cl)
152 firstmutable = len(cl)
153 for roots in repo._phasecache.phaseroots[1:]:
153 for roots in repo._phasecache.phaseroots[1:]:
154 if roots:
154 if roots:
155 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
155 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
156 # protect from nullrev root
156 # protect from nullrev root
157 firstmutable = max(0, firstmutable)
157 firstmutable = max(0, firstmutable)
158 return frozenset(pycompat.xrange(firstmutable, len(cl)))
158 return frozenset(pycompat.xrange(firstmutable, len(cl)))
159
159
160
160
161 # function to compute filtered set
161 # function to compute filtered set
162 #
162 #
163 # When adding a new filter you MUST update the table at:
163 # When adding a new filter you MUST update the table at:
164 # mercurial.utils.repoviewutil.subsettable
164 # mercurial.utils.repoviewutil.subsettable
165 # Otherwise your filter will have to recompute all its branches cache
165 # Otherwise your filter will have to recompute all its branches cache
166 # from scratch (very slow).
166 # from scratch (very slow).
167 filtertable = {
167 filtertable = {
168 b'visible': computehidden,
168 b'visible': computehidden,
169 b'visible-hidden': computehidden,
169 b'visible-hidden': computehidden,
170 b'served.hidden': computesecret,
170 b'served.hidden': computesecret,
171 b'served': computeunserved,
171 b'served': computeunserved,
172 b'immutable': computemutable,
172 b'immutable': computemutable,
173 b'base': computeimpactable,
173 b'base': computeimpactable,
174 }
174 }
175
175
176 _basefiltername = list(filtertable)
176 _basefiltername = list(filtertable)
177
177
178
178
179 def extrafilter(ui):
179 def extrafilter(ui):
180 """initialize extra filter and return its id
180 """initialize extra filter and return its id
181
181
182 If extra filtering is configured, we make sure the associated filtered view
182 If extra filtering is configured, we make sure the associated filtered view
183 are declared and return the associated id.
183 are declared and return the associated id.
184 """
184 """
185 frevs = ui.config(b'experimental', b'extra-filter-revs')
185 frevs = ui.config(b'experimental', b'extra-filter-revs')
186 if frevs is None:
186 if frevs is None:
187 return None
187 return None
188
188
189 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
189 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
190
190
191 combine = lambda fname: fname + b'%' + fid
191 combine = lambda fname: fname + b'%' + fid
192
192
193 subsettable = repoviewutil.subsettable
193 subsettable = repoviewutil.subsettable
194
194
195 if combine(b'base') not in filtertable:
195 if combine(b'base') not in filtertable:
196 for name in _basefiltername:
196 for name in _basefiltername:
197
197
198 def extrafilteredrevs(repo, *args, **kwargs):
198 def extrafilteredrevs(repo, *args, **kwargs):
199 baserevs = filtertable[name](repo, *args, **kwargs)
199 baserevs = filtertable[name](repo, *args, **kwargs)
200 extrarevs = frozenset(repo.revs(frevs))
200 extrarevs = frozenset(repo.revs(frevs))
201 return baserevs | extrarevs
201 return baserevs | extrarevs
202
202
203 filtertable[combine(name)] = extrafilteredrevs
203 filtertable[combine(name)] = extrafilteredrevs
204 if name in subsettable:
204 if name in subsettable:
205 subsettable[combine(name)] = combine(subsettable[name])
205 subsettable[combine(name)] = combine(subsettable[name])
206 return fid
206 return fid
207
207
208
208
209 def filterrevs(repo, filtername, visibilityexceptions=None):
209 def filterrevs(repo, filtername, visibilityexceptions=None):
210 """returns set of filtered revision for this filter name
210 """returns set of filtered revision for this filter name
211
211
212 visibilityexceptions is a set of revs which must are exceptions for
212 visibilityexceptions is a set of revs which must are exceptions for
213 hidden-state and must be visible. They are dynamic and hence we should not
213 hidden-state and must be visible. They are dynamic and hence we should not
214 cache it's result"""
214 cache it's result"""
215 if filtername not in repo.filteredrevcache:
215 if filtername not in repo.filteredrevcache:
216 func = filtertable[filtername]
216 func = filtertable[filtername]
217 if visibilityexceptions:
217 if visibilityexceptions:
218 return func(repo.unfiltered, visibilityexceptions)
218 return func(repo.unfiltered, visibilityexceptions)
219 repo.filteredrevcache[filtername] = func(repo.unfiltered())
219 repo.filteredrevcache[filtername] = func(repo.unfiltered())
220 return repo.filteredrevcache[filtername]
220 return repo.filteredrevcache[filtername]
221
221
222
222
223 def wrapchangelog(unfichangelog, filteredrevs):
223 def wrapchangelog(unfichangelog, filteredrevs):
224 cl = copy.copy(unfichangelog)
224 cl = copy.copy(unfichangelog)
225 cl.filteredrevs = filteredrevs
225 cl.filteredrevs = filteredrevs
226
226
227 class filteredchangelog(cl.__class__):
227 class filteredchangelog(cl.__class__):
228 def tiprev(self):
228 def tiprev(self):
229 """filtered version of revlog.tiprev"""
229 """filtered version of revlog.tiprev"""
230 for i in pycompat.xrange(len(self) - 1, -2, -1):
230 for i in pycompat.xrange(len(self) - 1, -2, -1):
231 if i not in self.filteredrevs:
231 if i not in self.filteredrevs:
232 return i
232 return i
233
233
234 def __contains__(self, rev):
234 def __contains__(self, rev):
235 """filtered version of revlog.__contains__"""
235 """filtered version of revlog.__contains__"""
236 return 0 <= rev < len(self) and rev not in self.filteredrevs
236 return 0 <= rev < len(self) and rev not in self.filteredrevs
237
237
238 def __iter__(self):
238 def __iter__(self):
239 """filtered version of revlog.__iter__"""
239 """filtered version of revlog.__iter__"""
240 if len(self.filteredrevs) == 0:
240 if len(self.filteredrevs) == 0:
241 return revlog.revlog.__iter__(self)
241 return revlog.revlog.__iter__(self)
242
242
243
243
244 def filterediter():
244 def filterediter():
245 for i in pycompat.xrange(len(self)):
245 for i in pycompat.xrange(len(self)):
246 if i not in self.filteredrevs:
246 if i not in self.filteredrevs:
247 yield i
247 yield i
248
248
249 return filterediter()
249 return filterediter()
250
250
251 def revs(self, start=0, stop=None):
251 def revs(self, start=0, stop=None):
252 """filtered version of revlog.revs"""
252 """filtered version of revlog.revs"""
253 for i in super(filteredchangelog, self).revs(start, stop):
253 for i in super(filteredchangelog, self).revs(start, stop):
254 if i not in self.filteredrevs:
254 if i not in self.filteredrevs:
255 yield i
255 yield i
256
256
257 def _checknofilteredinrevs(self, revs):
257 def _checknofilteredinrevs(self, revs):
258 """raise the appropriate error if 'revs' contains a filtered revision
258 """raise the appropriate error if 'revs' contains a filtered revision
259
259
260 This returns a version of 'revs' to be used thereafter by the caller.
260 This returns a version of 'revs' to be used thereafter by the caller.
261 In particular, if revs is an iterator, it is converted into a set.
261 In particular, if revs is an iterator, it is converted into a set.
262 """
262 """
263 safehasattr = util.safehasattr
263 safehasattr = util.safehasattr
264 if safehasattr(revs, '__next__'):
264 if safehasattr(revs, '__next__'):
265 # Note that inspect.isgenerator() is not true for iterators,
265 # Note that inspect.isgenerator() is not true for iterators,
266 revs = set(revs)
266 revs = set(revs)
267
267
268 filteredrevs = self.filteredrevs
268 filteredrevs = self.filteredrevs
269 if safehasattr(revs, 'first'): # smartset
269 if safehasattr(revs, 'first'): # smartset
270 offenders = revs & filteredrevs
270 offenders = revs & filteredrevs
271 else:
271 else:
272 offenders = filteredrevs.intersection(revs)
272 offenders = filteredrevs.intersection(revs)
273
273
274 for rev in offenders:
274 for rev in offenders:
275 raise error.FilteredIndexError(rev)
275 raise error.FilteredIndexError(rev)
276 return revs
276 return revs
277
277
278 def headrevs(self, revs=None):
278 def headrevs(self, revs=None):
279 if revs is None and self.filteredrevs:
279 if revs is None and self.filteredrevs:
280 try:
280 try:
281 return self.index.headrevsfiltered(self.filteredrevs)
281 return self.index.headrevsfiltered(self.filteredrevs)
282 # AttributeError covers non-c-extension environments and
282 # AttributeError covers non-c-extension environments and
283 # old c extensions without filter handling.
283 # old c extensions without filter handling.
284 except AttributeError:
284 except AttributeError:
285 return self._headrevs()
285 return self._headrevs()
286
286
287 if self.filteredrevs:
287 if self.filteredrevs:
288 revs = self._checknofilteredinrevs(revs)
288 revs = self._checknofilteredinrevs(revs)
289 return super(filteredchangelog, self).headrevs(revs)
289 return super(filteredchangelog, self).headrevs(revs)
290
290
291 def strip(self, *args, **kwargs):
292 # XXX make something better than assert
293 # We can't expect proper strip behavior if we are filtered.
294 assert not self.filteredrevs
295 super(filteredchangelog, self).strip(*args, **kwargs)
296
291 cl.__class__ = filteredchangelog
297 cl.__class__ = filteredchangelog
292
298
293 return cl
299 return cl
294
300
295
301
296 class repoview(object):
302 class repoview(object):
297 """Provide a read/write view of a repo through a filtered changelog
303 """Provide a read/write view of a repo through a filtered changelog
298
304
299 This object is used to access a filtered version of a repository without
305 This object is used to access a filtered version of a repository without
300 altering the original repository object itself. We can not alter the
306 altering the original repository object itself. We can not alter the
301 original object for two main reasons:
307 original object for two main reasons:
302 - It prevents the use of a repo with multiple filters at the same time. In
308 - It prevents the use of a repo with multiple filters at the same time. In
303 particular when multiple threads are involved.
309 particular when multiple threads are involved.
304 - It makes scope of the filtering harder to control.
310 - It makes scope of the filtering harder to control.
305
311
306 This object behaves very closely to the original repository. All attribute
312 This object behaves very closely to the original repository. All attribute
307 operations are done on the original repository:
313 operations are done on the original repository:
308 - An access to `repoview.someattr` actually returns `repo.someattr`,
314 - An access to `repoview.someattr` actually returns `repo.someattr`,
309 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
315 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
310 - A deletion of `repoview.someattr` actually drops `someattr`
316 - A deletion of `repoview.someattr` actually drops `someattr`
311 from `repo.__dict__`.
317 from `repo.__dict__`.
312
318
313 The only exception is the `changelog` property. It is overridden to return
319 The only exception is the `changelog` property. It is overridden to return
314 a (surface) copy of `repo.changelog` with some revisions filtered. The
320 a (surface) copy of `repo.changelog` with some revisions filtered. The
315 `filtername` attribute of the view control the revisions that need to be
321 `filtername` attribute of the view control the revisions that need to be
316 filtered. (the fact the changelog is copied is an implementation detail).
322 filtered. (the fact the changelog is copied is an implementation detail).
317
323
318 Unlike attributes, this object intercepts all method calls. This means that
324 Unlike attributes, this object intercepts all method calls. This means that
319 all methods are run on the `repoview` object with the filtered `changelog`
325 all methods are run on the `repoview` object with the filtered `changelog`
320 property. For this purpose the simple `repoview` class must be mixed with
326 property. For this purpose the simple `repoview` class must be mixed with
321 the actual class of the repository. This ensures that the resulting
327 the actual class of the repository. This ensures that the resulting
322 `repoview` object have the very same methods than the repo object. This
328 `repoview` object have the very same methods than the repo object. This
323 leads to the property below.
329 leads to the property below.
324
330
325 repoview.method() --> repo.__class__.method(repoview)
331 repoview.method() --> repo.__class__.method(repoview)
326
332
327 The inheritance has to be done dynamically because `repo` can be of any
333 The inheritance has to be done dynamically because `repo` can be of any
328 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
334 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
329 """
335 """
330
336
331 def __init__(self, repo, filtername, visibilityexceptions=None):
337 def __init__(self, repo, filtername, visibilityexceptions=None):
332 object.__setattr__(self, r'_unfilteredrepo', repo)
338 object.__setattr__(self, r'_unfilteredrepo', repo)
333 object.__setattr__(self, r'filtername', filtername)
339 object.__setattr__(self, r'filtername', filtername)
334 object.__setattr__(self, r'_clcachekey', None)
340 object.__setattr__(self, r'_clcachekey', None)
335 object.__setattr__(self, r'_clcache', None)
341 object.__setattr__(self, r'_clcache', None)
336 # revs which are exceptions and must not be hidden
342 # revs which are exceptions and must not be hidden
337 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
343 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
338
344
339 # not a propertycache on purpose we shall implement a proper cache later
345 # not a propertycache on purpose we shall implement a proper cache later
340 @property
346 @property
341 def changelog(self):
347 def changelog(self):
342 """return a filtered version of the changeset
348 """return a filtered version of the changeset
343
349
344 this changelog must not be used for writing"""
350 this changelog must not be used for writing"""
345 # some cache may be implemented later
351 # some cache may be implemented later
346 unfi = self._unfilteredrepo
352 unfi = self._unfilteredrepo
347 unfichangelog = unfi.changelog
353 unfichangelog = unfi.changelog
348 # bypass call to changelog.method
354 # bypass call to changelog.method
349 unfiindex = unfichangelog.index
355 unfiindex = unfichangelog.index
350 unfilen = len(unfiindex)
356 unfilen = len(unfiindex)
351 unfinode = unfiindex[unfilen - 1][7]
357 unfinode = unfiindex[unfilen - 1][7]
352 with util.timedcm('repo filter for %s', self.filtername):
358 with util.timedcm('repo filter for %s', self.filtername):
353 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
359 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
354 cl = self._clcache
360 cl = self._clcache
355 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
361 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
356 # if cl.index is not unfiindex, unfi.changelog would be
362 # if cl.index is not unfiindex, unfi.changelog would be
357 # recreated, and our clcache refers to garbage object
363 # recreated, and our clcache refers to garbage object
358 if cl is not None and (
364 if cl is not None and (
359 cl.index is not unfiindex or newkey != self._clcachekey
365 cl.index is not unfiindex or newkey != self._clcachekey
360 ):
366 ):
361 cl = None
367 cl = None
362 # could have been made None by the previous if
368 # could have been made None by the previous if
363 if cl is None:
369 if cl is None:
364 cl = wrapchangelog(unfichangelog, revs)
370 cl = wrapchangelog(unfichangelog, revs)
365 object.__setattr__(self, r'_clcache', cl)
371 object.__setattr__(self, r'_clcache', cl)
366 object.__setattr__(self, r'_clcachekey', newkey)
372 object.__setattr__(self, r'_clcachekey', newkey)
367 return cl
373 return cl
368
374
369 def unfiltered(self):
375 def unfiltered(self):
370 """Return an unfiltered version of a repo"""
376 """Return an unfiltered version of a repo"""
371 return self._unfilteredrepo
377 return self._unfilteredrepo
372
378
def filtered(self, name, visibilityexceptions=None):
    """Return a filtered version of a repository"""
    # Reuse this very view when it already matches the requested filter
    # and no extra visibility exceptions were asked for.
    samefilter = name == self.filtername
    if samefilter and not visibilityexceptions:
        return self
    # Otherwise, build the requested view from the unfiltered repo.
    return self.unfiltered().filtered(name, visibilityexceptions)
378
384
def __repr__(self):
    """Debug representation: class name, filter name, wrapped repo."""
    clsname = self.__class__.__name__
    filtername = pycompat.sysstr(self.filtername)
    return r'<%s:%s %r>' % (clsname, filtername, self.unfiltered())
385
391
386 # everything access are forwarded to the proxied repo
392 # everything access are forwarded to the proxied repo
387 def __getattr__(self, attr):
393 def __getattr__(self, attr):
388 return getattr(self._unfilteredrepo, attr)
394 return getattr(self._unfilteredrepo, attr)
389
395
390 def __setattr__(self, attr, value):
396 def __setattr__(self, attr, value):
391 return setattr(self._unfilteredrepo, attr, value)
397 return setattr(self._unfilteredrepo, attr, value)
392
398
393 def __delattr__(self, attr):
399 def __delattr__(self, attr):
394 return delattr(self._unfilteredrepo, attr)
400 return delattr(self._unfilteredrepo, attr)
395
401
396
402
# Python <3.4 easily leaks types via __mro__. See
# https://bugs.python.org/issue17950. We cache dynamically created types
# so they won't be leaked on every invocation of repo.filtered().
# Maps a base repo class -> its cached repoview subclass; weak keys let
# an entry disappear once the base class itself is garbage collected.
_filteredrepotypes = weakref.WeakKeyDictionary()
401
407
402
408
def newtype(base):
    """Create a new type with the repoview mixin and the given base class"""
    try:
        # Fast path: a filtered type for this base was already created.
        return _filteredrepotypes[base]
    except KeyError:
        pass

    class filteredrepo(repoview, base):
        pass

    _filteredrepotypes[base] = filteredrepo
    return filteredrepo
General Comments 0
You need to be logged in to leave comments. Login now