##// END OF EJS Templates
repoview: move changelog.flags() override to filteredchangelog...
Martin von Zweigbergk -
r43758:61c37210 default
parent child Browse files
Show More
@@ -1,630 +1,624 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
25 from .utils import (
25 from .utils import (
26 dateutil,
26 dateutil,
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
31
31
32 _defaultextra = {b'branch': b'default'}
32 _defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
def _string_unescape(text):
    """Reverse ``_string_escape`` for the NUL escape, then defer the rest
    to ``stringutil.unescapestr``."""
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0: temporarily tag
        # every literal backslash pair with a newline, substitute, then
        # drop the tags again (the escaped text cannot contain raw \n)
        for old, new in (
            (b'\\\\', b'\\\\\n'),
            (b'\\0', b'\0'),
            (b'\n', b''),
        ):
            text = text.replace(old, new)
    return stringutil.unescapestr(text)
62
62
63
63
def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # start from the defaults so a missing 'branch' entry decodes as 'default'
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
81
81
82
82
def encodeextra(d):
    """Encode an extra dict into the NUL-separated on-disk form."""
    # keys must be sorted to produce a deterministic changelog entry
    return b"\0".join(
        _string_escape(b'%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d)
    )
90
90
91
91
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = (line.rstrip() for line in desc.splitlines())
    return b'\n'.join(cleaned).strip(b'\n')
95
95
96
96
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # pending writes accumulate in 'buf' instead of reaching the file;
        # the caller shares this list so it can flush the data itself later
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # virtual read/write position, spanning the real file plus 'data'
        self.offset = fp.tell()
        # size of the real on-disk file; bytes past it live in self.data
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # logical end of the virtual file (real size plus buffered bytes)
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # writes are deliberately held in memory until finalized; no-op
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        # only reposition the real file when the target lies inside it;
        # positions beyond self.size are resolved against self.data in read()
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            # first satisfy as much as possible from the real file
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # remainder (or an unbounded read) continues into the buffer;
            # doff is the position relative to the start of buffered data
            doff = self.offset - self.size
            # coalesce the buffer into a single chunk so it can be sliced
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # appends always go to the in-memory buffer, never to the real file
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
165
165
166
166
167 def _divertopener(opener, target):
167 def _divertopener(opener, target):
168 """build an opener that writes in 'target.a' instead of 'target'"""
168 """build an opener that writes in 'target.a' instead of 'target'"""
169
169
170 def _divert(name, mode=b'r', checkambig=False):
170 def _divert(name, mode=b'r', checkambig=False):
171 if name != target:
171 if name != target:
172 return opener(name, mode)
172 return opener(name, mode)
173 return opener(name + b".a", mode)
173 return opener(name + b".a", mode)
174
174
175 return _divert
175 return _divert
176
176
177
177
178 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
180
180
181 def _delay(name, mode=b'r', checkambig=False):
181 def _delay(name, mode=b'r', checkambig=False):
182 if name != target:
182 if name != target:
183 return opener(name, mode)
183 return opener(name, mode)
184 return appender(opener, name, mode, buf)
184 return appender(opener, name, mode, buf)
185
185
186 return _delay
186 return _delay
187
187
188
188
@attr.s
class _changelogrevision(object):
    """Plain data holder for parsed changelog revision fields.

    Returned by ``changelogrevision.__new__`` for the empty/null revision;
    ``changelogrevision`` itself computes the same fields lazily from text.
    """

    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    # manifest node (binary); nullid for the null revision
    manifest = attr.ib(default=nullid)
    # author/user as bytes
    user = attr.ib(default=b'')
    # (time, timezone) pair
    date = attr.ib(default=(0, 0))
    # files touched by the changeset
    files = attr.ib(default=attr.Factory(list))
    # copy-tracing metadata; None means "not recorded"
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    # commit message
    description = attr.ib(default=b'')
203
203
204
204
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',   # (nl1, nl2, nl3, doublenl) indices into _text
        r'_text',      # raw revision text
        r'_sidedata',  # sidedata mapping for this revision
        r'_cpsd',      # True when copies are stored in changeset sidedata
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # empty text means the null revision: return the plain-data
            # fallback populated with defaults only
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        # first line of the text is the hex manifest node
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # "time tz" portion of the third line (extra, if any, is dropped)
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # encoded extra is the optional third field of the date line;
        # returns None when the revision carries no extra
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        if off[2] == off[3]:
            # no file list: nl3 starts the double newline directly
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        # prefer sidedata when copies live there, falling back to 'extra'
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # +2 skips the double newline separating files from the description
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
361
361
362
362
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # the real opener; self.opener is swapped for a diverting/delaying
        # wrapper while a transaction is pending (see delayupdate)
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from this view; presumably consumed by repoview's
        # filtered wrappers — TODO confirm against repoview.py
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')

    def delayupdate(self, tr):
        b"delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # empty changelog: divert all writes to indexfile + '.a'
                # and rename into place at finalize time
                self._divert = True
                if self._realopener.exists(self.indexfile + b'.a'):
                    self._realopener.unlink(self.indexfile + b'.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # existing data: buffer appended chunks in memory instead
                self._delaybuf = []
                self.opener = _delayopener(
                    self._realopener, self.indexfile, self._delaybuf
                )
        self._delayed = True
        tr.addpending(b'cl-%i' % id(self), self._writepending)
        tr.addfinalize(b'cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        b"finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + b".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered chunks to the real index
            fp = self.opener(self.indexfile, b'a', checkambig=True)
            fp.write(b"".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        b"create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + b".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, b"w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write(b"".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        # the return value tells the transaction whether a pending file exists
        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while delayed, the index must not be split into index + data files
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

        - manifest node in binary
        - author/user as a localstr
        - date as a 2-tuple of (time, timezone)
        - list of files
        - commit message as a localstr
        - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        d, s = self._revisiondata(node)
        c = changelogrevision(
            d, s, self._copiesstorage == b'changeset-sidedata'
        )
        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        text, sidedata = self._revisiondata(nodeorrev)
        return changelogrevision(
            text, sidedata, self._copiesstorage == b'changeset-sidedata'
        )

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        last = text.index(b"\n\n")
        # lines before the double newline are: node, user, date, then files
        l = text[:last].split(b'\n')
        return l[3:]

    def add(
        self,
        manifest,
        files,
        desc,
        transaction,
        p1,
        p2,
        user,
        date=None,
        extra=None,
        p1copies=None,
        p2copies=None,
        filesadded=None,
        filesremoved=None,
    ):
        """Add a new revision to the changelog; returns the new node.

        Raises ``error.StorageError`` for an empty or newline-containing
        username and for reserved branch names.
        """
        # Convert to UTF-8 encoded bytestrings as the very first
        # thing: calling any method on a localstr object will turn it
        # into a str object and the cached UTF-8 string is thus lost.
        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.StorageError(_(b"empty username"))
        if b"\n" in user:
            raise error.StorageError(
                _(b"username %r contains a newline") % pycompat.bytestr(user)
            )

        desc = stripdesc(desc)

        if date:
            parseddate = b"%d %d" % dateutil.parsedate(date)
        else:
            parseddate = b"%d %d" % dateutil.makedate()
        if extra:
            branch = extra.get(b"branch")
            if branch in (b"default", b""):
                # the default branch is implicit; don't store it
                del extra[b"branch"]
            elif branch in (b".", b"null", b"tip"):
                raise error.StorageError(
                    _(b'the name \'%s\' is reserved') % branch
                )
        sortedfiles = sorted(files)
        sidedata = None
        if extra is not None:
            # copy metadata is recomputed below; drop any stale entries
            for name in (
                b'p1copies',
                b'p2copies',
                b'filesadded',
                b'filesremoved',
            ):
                extra.pop(name, None)
        if p1copies is not None:
            p1copies = copies.encodecopies(sortedfiles, p1copies)
        if p2copies is not None:
            p2copies = copies.encodecopies(sortedfiles, p2copies)
        if filesadded is not None:
            filesadded = copies.encodefileindices(sortedfiles, filesadded)
        if filesremoved is not None:
            filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
        if self._copiesstorage == b'extra':
            # store copy metadata in the 'extra' dict
            extrasentries = p1copies, p2copies, filesadded, filesremoved
            if extra is None and any(x is not None for x in extrasentries):
                extra = {}
            if p1copies is not None:
                extra[b'p1copies'] = p1copies
            if p2copies is not None:
                extra[b'p2copies'] = p2copies
            if filesadded is not None:
                extra[b'filesadded'] = filesadded
            if filesremoved is not None:
                extra[b'filesremoved'] = filesremoved
        elif self._copiesstorage == b'changeset-sidedata':
            # store copy metadata in revlog sidedata instead
            sidedata = {}
            if p1copies:
                sidedata[sidedatamod.SD_P1COPIES] = p1copies
            if p2copies:
                sidedata[sidedatamod.SD_P2COPIES] = p2copies
            if filesadded:
                sidedata[sidedatamod.SD_FILESADDED] = filesadded
            if filesremoved:
                sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
            if not sidedata:
                sidedata = None

        if extra:
            extra = encodeextra(extra)
            parseddate = b"%s %s" % (parseddate, extra)
        # assemble the on-disk text; see changelogrevision.__new__ for format
        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
        text = b"\n".join(l)
        return self.addrevision(
            text, transaction, len(self), p1, p2, sidedata=sidedata
        )

    def branchinfo(self, rev):
        """return the branch name and open/close state of a revision

        This function exists because creating a changectx object
        just to access this is costly."""
        extra = self.read(rev)[5]
        return encoding.tolocal(extra.get(b"branch")), b'close' in extra

    def _nodeduplicatecallback(self, transaction, node):
        # keep track of revisions that got "re-added", eg: unbunde of know rev.
        #
        # We track them in a list to preserve their order from the source bundle
        duplicates = transaction.changes.setdefault(b'revduplicates', [])
        duplicates.append(self.rev(node))
@@ -1,448 +1,454 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 hex,
16 hex,
17 nullrev,
17 nullrev,
18 )
18 )
19 from .pycompat import (
19 from .pycompat import (
20 delattr,
20 delattr,
21 getattr,
21 getattr,
22 setattr,
22 setattr,
23 )
23 )
24 from . import (
24 from . import (
25 error,
25 error,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pycompat,
28 pycompat,
29 revlog,
29 revlog,
30 tags as tagsmod,
30 tags as tagsmod,
31 util,
31 util,
32 )
32 )
33 from .utils import repoviewutil
33 from .utils import repoviewutil
34
34
35
35
36 def hideablerevs(repo):
36 def hideablerevs(repo):
37 """Revision candidates to be hidden
37 """Revision candidates to be hidden
38
38
39 This is a standalone function to allow extensions to wrap it.
39 This is a standalone function to allow extensions to wrap it.
40
40
41 Because we use the set of immutable changesets as a fallback subset in
41 Because we use the set of immutable changesets as a fallback subset in
42 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
42 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
43 "public" changesets as "hideable". Doing so would break multiple code
43 "public" changesets as "hideable". Doing so would break multiple code
44 assertions and lead to crashes."""
44 assertions and lead to crashes."""
45 obsoletes = obsolete.getrevs(repo, b'obsolete')
45 obsoletes = obsolete.getrevs(repo, b'obsolete')
46 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
46 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
47 internals = frozenset(internals)
47 internals = frozenset(internals)
48 return obsoletes | internals
48 return obsoletes | internals
49
49
50
50
51 def pinnedrevs(repo):
51 def pinnedrevs(repo):
52 """revisions blocking hidden changesets from being filtered
52 """revisions blocking hidden changesets from being filtered
53 """
53 """
54
54
55 cl = repo.changelog
55 cl = repo.changelog
56 pinned = set()
56 pinned = set()
57 pinned.update([par.rev() for par in repo[None].parents()])
57 pinned.update([par.rev() for par in repo[None].parents()])
58 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
58 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
59
59
60 tags = {}
60 tags = {}
61 tagsmod.readlocaltags(repo.ui, repo, tags, {})
61 tagsmod.readlocaltags(repo.ui, repo, tags, {})
62 if tags:
62 if tags:
63 rev, nodemap = cl.rev, cl.nodemap
63 rev, nodemap = cl.rev, cl.nodemap
64 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
64 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
65 return pinned
65 return pinned
66
66
67
67
68 def _revealancestors(pfunc, hidden, revs):
68 def _revealancestors(pfunc, hidden, revs):
69 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
69 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
70 from 'hidden'
70 from 'hidden'
71
71
72 - pfunc(r): a funtion returning parent of 'r',
72 - pfunc(r): a funtion returning parent of 'r',
73 - hidden: the (preliminary) hidden revisions, to be updated
73 - hidden: the (preliminary) hidden revisions, to be updated
74 - revs: iterable of revnum,
74 - revs: iterable of revnum,
75
75
76 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
76 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
77 *not* revealed)
77 *not* revealed)
78 """
78 """
79 stack = list(revs)
79 stack = list(revs)
80 while stack:
80 while stack:
81 for p in pfunc(stack.pop()):
81 for p in pfunc(stack.pop()):
82 if p != nullrev and p in hidden:
82 if p != nullrev and p in hidden:
83 hidden.remove(p)
83 hidden.remove(p)
84 stack.append(p)
84 stack.append(p)
85
85
86
86
87 def computehidden(repo, visibilityexceptions=None):
87 def computehidden(repo, visibilityexceptions=None):
88 """compute the set of hidden revision to filter
88 """compute the set of hidden revision to filter
89
89
90 During most operation hidden should be filtered."""
90 During most operation hidden should be filtered."""
91 assert not repo.changelog.filteredrevs
91 assert not repo.changelog.filteredrevs
92
92
93 hidden = hideablerevs(repo)
93 hidden = hideablerevs(repo)
94 if hidden:
94 if hidden:
95 hidden = set(hidden - pinnedrevs(repo))
95 hidden = set(hidden - pinnedrevs(repo))
96 if visibilityexceptions:
96 if visibilityexceptions:
97 hidden -= visibilityexceptions
97 hidden -= visibilityexceptions
98 pfunc = repo.changelog.parentrevs
98 pfunc = repo.changelog.parentrevs
99 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
99 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
100
100
101 visible = mutable - hidden
101 visible = mutable - hidden
102 _revealancestors(pfunc, hidden, visible)
102 _revealancestors(pfunc, hidden, visible)
103 return frozenset(hidden)
103 return frozenset(hidden)
104
104
105
105
106 def computesecret(repo, visibilityexceptions=None):
106 def computesecret(repo, visibilityexceptions=None):
107 """compute the set of revision that can never be exposed through hgweb
107 """compute the set of revision that can never be exposed through hgweb
108
108
109 Changeset in the secret phase (or above) should stay unaccessible."""
109 Changeset in the secret phase (or above) should stay unaccessible."""
110 assert not repo.changelog.filteredrevs
110 assert not repo.changelog.filteredrevs
111 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
111 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
112 return frozenset(secrets)
112 return frozenset(secrets)
113
113
114
114
115 def computeunserved(repo, visibilityexceptions=None):
115 def computeunserved(repo, visibilityexceptions=None):
116 """compute the set of revision that should be filtered when used a server
116 """compute the set of revision that should be filtered when used a server
117
117
118 Secret and hidden changeset should not pretend to be here."""
118 Secret and hidden changeset should not pretend to be here."""
119 assert not repo.changelog.filteredrevs
119 assert not repo.changelog.filteredrevs
120 # fast path in simple case to avoid impact of non optimised code
120 # fast path in simple case to avoid impact of non optimised code
121 hiddens = filterrevs(repo, b'visible')
121 hiddens = filterrevs(repo, b'visible')
122 secrets = filterrevs(repo, b'served.hidden')
122 secrets = filterrevs(repo, b'served.hidden')
123 if secrets:
123 if secrets:
124 return frozenset(hiddens | secrets)
124 return frozenset(hiddens | secrets)
125 else:
125 else:
126 return hiddens
126 return hiddens
127
127
128
128
129 def computemutable(repo, visibilityexceptions=None):
129 def computemutable(repo, visibilityexceptions=None):
130 assert not repo.changelog.filteredrevs
130 assert not repo.changelog.filteredrevs
131 # fast check to avoid revset call on huge repo
131 # fast check to avoid revset call on huge repo
132 if any(repo._phasecache.phaseroots[1:]):
132 if any(repo._phasecache.phaseroots[1:]):
133 getphase = repo._phasecache.phase
133 getphase = repo._phasecache.phase
134 maymutable = filterrevs(repo, b'base')
134 maymutable = filterrevs(repo, b'base')
135 return frozenset(r for r in maymutable if getphase(repo, r))
135 return frozenset(r for r in maymutable if getphase(repo, r))
136 return frozenset()
136 return frozenset()
137
137
138
138
139 def computeimpactable(repo, visibilityexceptions=None):
139 def computeimpactable(repo, visibilityexceptions=None):
140 """Everything impactable by mutable revision
140 """Everything impactable by mutable revision
141
141
142 The immutable filter still have some chance to get invalidated. This will
142 The immutable filter still have some chance to get invalidated. This will
143 happen when:
143 happen when:
144
144
145 - you garbage collect hidden changeset,
145 - you garbage collect hidden changeset,
146 - public phase is moved backward,
146 - public phase is moved backward,
147 - something is changed in the filtering (this could be fixed)
147 - something is changed in the filtering (this could be fixed)
148
148
149 This filter out any mutable changeset and any public changeset that may be
149 This filter out any mutable changeset and any public changeset that may be
150 impacted by something happening to a mutable revision.
150 impacted by something happening to a mutable revision.
151
151
152 This is achieved by filtered everything with a revision number egal or
152 This is achieved by filtered everything with a revision number egal or
153 higher than the first mutable changeset is filtered."""
153 higher than the first mutable changeset is filtered."""
154 assert not repo.changelog.filteredrevs
154 assert not repo.changelog.filteredrevs
155 cl = repo.changelog
155 cl = repo.changelog
156 firstmutable = len(cl)
156 firstmutable = len(cl)
157 for roots in repo._phasecache.phaseroots[1:]:
157 for roots in repo._phasecache.phaseroots[1:]:
158 if roots:
158 if roots:
159 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
159 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
160 # protect from nullrev root
160 # protect from nullrev root
161 firstmutable = max(0, firstmutable)
161 firstmutable = max(0, firstmutable)
162 return frozenset(pycompat.xrange(firstmutable, len(cl)))
162 return frozenset(pycompat.xrange(firstmutable, len(cl)))
163
163
164
164
165 # function to compute filtered set
165 # function to compute filtered set
166 #
166 #
167 # When adding a new filter you MUST update the table at:
167 # When adding a new filter you MUST update the table at:
168 # mercurial.utils.repoviewutil.subsettable
168 # mercurial.utils.repoviewutil.subsettable
169 # Otherwise your filter will have to recompute all its branches cache
169 # Otherwise your filter will have to recompute all its branches cache
170 # from scratch (very slow).
170 # from scratch (very slow).
171 filtertable = {
171 filtertable = {
172 b'visible': computehidden,
172 b'visible': computehidden,
173 b'visible-hidden': computehidden,
173 b'visible-hidden': computehidden,
174 b'served.hidden': computesecret,
174 b'served.hidden': computesecret,
175 b'served': computeunserved,
175 b'served': computeunserved,
176 b'immutable': computemutable,
176 b'immutable': computemutable,
177 b'base': computeimpactable,
177 b'base': computeimpactable,
178 }
178 }
179
179
180 _basefiltername = list(filtertable)
180 _basefiltername = list(filtertable)
181
181
182
182
183 def extrafilter(ui):
183 def extrafilter(ui):
184 """initialize extra filter and return its id
184 """initialize extra filter and return its id
185
185
186 If extra filtering is configured, we make sure the associated filtered view
186 If extra filtering is configured, we make sure the associated filtered view
187 are declared and return the associated id.
187 are declared and return the associated id.
188 """
188 """
189 frevs = ui.config(b'experimental', b'extra-filter-revs')
189 frevs = ui.config(b'experimental', b'extra-filter-revs')
190 if frevs is None:
190 if frevs is None:
191 return None
191 return None
192
192
193 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
193 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
194
194
195 combine = lambda fname: fname + b'%' + fid
195 combine = lambda fname: fname + b'%' + fid
196
196
197 subsettable = repoviewutil.subsettable
197 subsettable = repoviewutil.subsettable
198
198
199 if combine(b'base') not in filtertable:
199 if combine(b'base') not in filtertable:
200 for name in _basefiltername:
200 for name in _basefiltername:
201
201
202 def extrafilteredrevs(repo, *args, **kwargs):
202 def extrafilteredrevs(repo, *args, **kwargs):
203 baserevs = filtertable[name](repo, *args, **kwargs)
203 baserevs = filtertable[name](repo, *args, **kwargs)
204 extrarevs = frozenset(repo.revs(frevs))
204 extrarevs = frozenset(repo.revs(frevs))
205 return baserevs | extrarevs
205 return baserevs | extrarevs
206
206
207 filtertable[combine(name)] = extrafilteredrevs
207 filtertable[combine(name)] = extrafilteredrevs
208 if name in subsettable:
208 if name in subsettable:
209 subsettable[combine(name)] = combine(subsettable[name])
209 subsettable[combine(name)] = combine(subsettable[name])
210 return fid
210 return fid
211
211
212
212
213 def filterrevs(repo, filtername, visibilityexceptions=None):
213 def filterrevs(repo, filtername, visibilityexceptions=None):
214 """returns set of filtered revision for this filter name
214 """returns set of filtered revision for this filter name
215
215
216 visibilityexceptions is a set of revs which must are exceptions for
216 visibilityexceptions is a set of revs which must are exceptions for
217 hidden-state and must be visible. They are dynamic and hence we should not
217 hidden-state and must be visible. They are dynamic and hence we should not
218 cache it's result"""
218 cache it's result"""
219 if filtername not in repo.filteredrevcache:
219 if filtername not in repo.filteredrevcache:
220 func = filtertable[filtername]
220 func = filtertable[filtername]
221 if visibilityexceptions:
221 if visibilityexceptions:
222 return func(repo.unfiltered, visibilityexceptions)
222 return func(repo.unfiltered, visibilityexceptions)
223 repo.filteredrevcache[filtername] = func(repo.unfiltered())
223 repo.filteredrevcache[filtername] = func(repo.unfiltered())
224 return repo.filteredrevcache[filtername]
224 return repo.filteredrevcache[filtername]
225
225
226
226
227 def wrapchangelog(unfichangelog, filteredrevs):
227 def wrapchangelog(unfichangelog, filteredrevs):
228 cl = copy.copy(unfichangelog)
228 cl = copy.copy(unfichangelog)
229 cl.filteredrevs = filteredrevs
229 cl.filteredrevs = filteredrevs
230
230
231 class filteredchangelog(cl.__class__):
231 class filteredchangelog(cl.__class__):
232 def tiprev(self):
232 def tiprev(self):
233 """filtered version of revlog.tiprev"""
233 """filtered version of revlog.tiprev"""
234 for i in pycompat.xrange(len(self) - 1, -2, -1):
234 for i in pycompat.xrange(len(self) - 1, -2, -1):
235 if i not in self.filteredrevs:
235 if i not in self.filteredrevs:
236 return i
236 return i
237
237
238 def __contains__(self, rev):
238 def __contains__(self, rev):
239 """filtered version of revlog.__contains__"""
239 """filtered version of revlog.__contains__"""
240 return 0 <= rev < len(self) and rev not in self.filteredrevs
240 return 0 <= rev < len(self) and rev not in self.filteredrevs
241
241
242 def __iter__(self):
242 def __iter__(self):
243 """filtered version of revlog.__iter__"""
243 """filtered version of revlog.__iter__"""
244 if len(self.filteredrevs) == 0:
244 if len(self.filteredrevs) == 0:
245 return revlog.revlog.__iter__(self)
245 return revlog.revlog.__iter__(self)
246
246
247
247
248 def filterediter():
248 def filterediter():
249 for i in pycompat.xrange(len(self)):
249 for i in pycompat.xrange(len(self)):
250 if i not in self.filteredrevs:
250 if i not in self.filteredrevs:
251 yield i
251 yield i
252
252
253 return filterediter()
253 return filterediter()
254
254
255 def revs(self, start=0, stop=None):
255 def revs(self, start=0, stop=None):
256 """filtered version of revlog.revs"""
256 """filtered version of revlog.revs"""
257 for i in super(filteredchangelog, self).revs(start, stop):
257 for i in super(filteredchangelog, self).revs(start, stop):
258 if i not in self.filteredrevs:
258 if i not in self.filteredrevs:
259 yield i
259 yield i
260
260
261 def _checknofilteredinrevs(self, revs):
261 def _checknofilteredinrevs(self, revs):
262 """raise the appropriate error if 'revs' contains a filtered revision
262 """raise the appropriate error if 'revs' contains a filtered revision
263
263
264 This returns a version of 'revs' to be used thereafter by the caller.
264 This returns a version of 'revs' to be used thereafter by the caller.
265 In particular, if revs is an iterator, it is converted into a set.
265 In particular, if revs is an iterator, it is converted into a set.
266 """
266 """
267 safehasattr = util.safehasattr
267 safehasattr = util.safehasattr
268 if safehasattr(revs, '__next__'):
268 if safehasattr(revs, '__next__'):
269 # Note that inspect.isgenerator() is not true for iterators,
269 # Note that inspect.isgenerator() is not true for iterators,
270 revs = set(revs)
270 revs = set(revs)
271
271
272 filteredrevs = self.filteredrevs
272 filteredrevs = self.filteredrevs
273 if safehasattr(revs, 'first'): # smartset
273 if safehasattr(revs, 'first'): # smartset
274 offenders = revs & filteredrevs
274 offenders = revs & filteredrevs
275 else:
275 else:
276 offenders = filteredrevs.intersection(revs)
276 offenders = filteredrevs.intersection(revs)
277
277
278 for rev in offenders:
278 for rev in offenders:
279 raise error.FilteredIndexError(rev)
279 raise error.FilteredIndexError(rev)
280 return revs
280 return revs
281
281
282 def headrevs(self, revs=None):
282 def headrevs(self, revs=None):
283 if revs is None and self.filteredrevs:
283 if revs is None and self.filteredrevs:
284 try:
284 try:
285 return self.index.headrevsfiltered(self.filteredrevs)
285 return self.index.headrevsfiltered(self.filteredrevs)
286 # AttributeError covers non-c-extension environments and
286 # AttributeError covers non-c-extension environments and
287 # old c extensions without filter handling.
287 # old c extensions without filter handling.
288 except AttributeError:
288 except AttributeError:
289 return self._headrevs()
289 return self._headrevs()
290
290
291 if self.filteredrevs:
291 if self.filteredrevs:
292 revs = self._checknofilteredinrevs(revs)
292 revs = self._checknofilteredinrevs(revs)
293 return super(filteredchangelog, self).headrevs(revs)
293 return super(filteredchangelog, self).headrevs(revs)
294
294
295 def strip(self, *args, **kwargs):
295 def strip(self, *args, **kwargs):
296 # XXX make something better than assert
296 # XXX make something better than assert
297 # We can't expect proper strip behavior if we are filtered.
297 # We can't expect proper strip behavior if we are filtered.
298 assert not self.filteredrevs
298 assert not self.filteredrevs
299 super(filteredchangelog, self).strip(*args, **kwargs)
299 super(filteredchangelog, self).strip(*args, **kwargs)
300
300
301 def rev(self, node):
301 def rev(self, node):
302 """filtered version of revlog.rev"""
302 """filtered version of revlog.rev"""
303 r = super(filteredchangelog, self).rev(node)
303 r = super(filteredchangelog, self).rev(node)
304 if r in self.filteredrevs:
304 if r in self.filteredrevs:
305 raise error.FilteredLookupError(
305 raise error.FilteredLookupError(
306 hex(node), self.indexfile, _(b'filtered node')
306 hex(node), self.indexfile, _(b'filtered node')
307 )
307 )
308 return r
308 return r
309
309
310 def node(self, rev):
310 def node(self, rev):
311 """filtered version of revlog.node"""
311 """filtered version of revlog.node"""
312 if rev in self.filteredrevs:
312 if rev in self.filteredrevs:
313 raise error.FilteredIndexError(rev)
313 raise error.FilteredIndexError(rev)
314 return super(filteredchangelog, self).node(rev)
314 return super(filteredchangelog, self).node(rev)
315
315
316 def linkrev(self, rev):
316 def linkrev(self, rev):
317 """filtered version of revlog.linkrev"""
317 """filtered version of revlog.linkrev"""
318 if rev in self.filteredrevs:
318 if rev in self.filteredrevs:
319 raise error.FilteredIndexError(rev)
319 raise error.FilteredIndexError(rev)
320 return super(filteredchangelog, self).linkrev(rev)
320 return super(filteredchangelog, self).linkrev(rev)
321
321
322 def parentrevs(self, rev):
322 def parentrevs(self, rev):
323 """filtered version of revlog.parentrevs"""
323 """filtered version of revlog.parentrevs"""
324 if rev in self.filteredrevs:
324 if rev in self.filteredrevs:
325 raise error.FilteredIndexError(rev)
325 raise error.FilteredIndexError(rev)
326 return super(filteredchangelog, self).parentrevs(rev)
326 return super(filteredchangelog, self).parentrevs(rev)
327
327
328 def flags(self, rev):
329 """filtered version of revlog.flags"""
330 if rev in self.filteredrevs:
331 raise error.FilteredIndexError(rev)
332 return super(filteredchangelog, self).flags(rev)
333
328 cl.__class__ = filteredchangelog
334 cl.__class__ = filteredchangelog
329
335
330 return cl
336 return cl
331
337
332
338
333 class repoview(object):
339 class repoview(object):
334 """Provide a read/write view of a repo through a filtered changelog
340 """Provide a read/write view of a repo through a filtered changelog
335
341
336 This object is used to access a filtered version of a repository without
342 This object is used to access a filtered version of a repository without
337 altering the original repository object itself. We can not alter the
343 altering the original repository object itself. We can not alter the
338 original object for two main reasons:
344 original object for two main reasons:
339 - It prevents the use of a repo with multiple filters at the same time. In
345 - It prevents the use of a repo with multiple filters at the same time. In
340 particular when multiple threads are involved.
346 particular when multiple threads are involved.
341 - It makes scope of the filtering harder to control.
347 - It makes scope of the filtering harder to control.
342
348
343 This object behaves very closely to the original repository. All attribute
349 This object behaves very closely to the original repository. All attribute
344 operations are done on the original repository:
350 operations are done on the original repository:
345 - An access to `repoview.someattr` actually returns `repo.someattr`,
351 - An access to `repoview.someattr` actually returns `repo.someattr`,
346 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
352 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
347 - A deletion of `repoview.someattr` actually drops `someattr`
353 - A deletion of `repoview.someattr` actually drops `someattr`
348 from `repo.__dict__`.
354 from `repo.__dict__`.
349
355
350 The only exception is the `changelog` property. It is overridden to return
356 The only exception is the `changelog` property. It is overridden to return
351 a (surface) copy of `repo.changelog` with some revisions filtered. The
357 a (surface) copy of `repo.changelog` with some revisions filtered. The
352 `filtername` attribute of the view control the revisions that need to be
358 `filtername` attribute of the view control the revisions that need to be
353 filtered. (the fact the changelog is copied is an implementation detail).
359 filtered. (the fact the changelog is copied is an implementation detail).
354
360
355 Unlike attributes, this object intercepts all method calls. This means that
361 Unlike attributes, this object intercepts all method calls. This means that
356 all methods are run on the `repoview` object with the filtered `changelog`
362 all methods are run on the `repoview` object with the filtered `changelog`
357 property. For this purpose the simple `repoview` class must be mixed with
363 property. For this purpose the simple `repoview` class must be mixed with
358 the actual class of the repository. This ensures that the resulting
364 the actual class of the repository. This ensures that the resulting
359 `repoview` object have the very same methods than the repo object. This
365 `repoview` object have the very same methods than the repo object. This
360 leads to the property below.
366 leads to the property below.
361
367
362 repoview.method() --> repo.__class__.method(repoview)
368 repoview.method() --> repo.__class__.method(repoview)
363
369
364 The inheritance has to be done dynamically because `repo` can be of any
370 The inheritance has to be done dynamically because `repo` can be of any
365 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
371 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
366 """
372 """
367
373
368 def __init__(self, repo, filtername, visibilityexceptions=None):
374 def __init__(self, repo, filtername, visibilityexceptions=None):
369 object.__setattr__(self, r'_unfilteredrepo', repo)
375 object.__setattr__(self, r'_unfilteredrepo', repo)
370 object.__setattr__(self, r'filtername', filtername)
376 object.__setattr__(self, r'filtername', filtername)
371 object.__setattr__(self, r'_clcachekey', None)
377 object.__setattr__(self, r'_clcachekey', None)
372 object.__setattr__(self, r'_clcache', None)
378 object.__setattr__(self, r'_clcache', None)
373 # revs which are exceptions and must not be hidden
379 # revs which are exceptions and must not be hidden
374 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
380 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
375
381
376 # not a propertycache on purpose we shall implement a proper cache later
382 # not a propertycache on purpose we shall implement a proper cache later
377 @property
383 @property
378 def changelog(self):
384 def changelog(self):
379 """return a filtered version of the changeset
385 """return a filtered version of the changeset
380
386
381 this changelog must not be used for writing"""
387 this changelog must not be used for writing"""
382 # some cache may be implemented later
388 # some cache may be implemented later
383 unfi = self._unfilteredrepo
389 unfi = self._unfilteredrepo
384 unfichangelog = unfi.changelog
390 unfichangelog = unfi.changelog
385 # bypass call to changelog.method
391 # bypass call to changelog.method
386 unfiindex = unfichangelog.index
392 unfiindex = unfichangelog.index
387 unfilen = len(unfiindex)
393 unfilen = len(unfiindex)
388 unfinode = unfiindex[unfilen - 1][7]
394 unfinode = unfiindex[unfilen - 1][7]
389 with util.timedcm('repo filter for %s', self.filtername):
395 with util.timedcm('repo filter for %s', self.filtername):
390 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
396 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
391 cl = self._clcache
397 cl = self._clcache
392 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
398 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
393 # if cl.index is not unfiindex, unfi.changelog would be
399 # if cl.index is not unfiindex, unfi.changelog would be
394 # recreated, and our clcache refers to garbage object
400 # recreated, and our clcache refers to garbage object
395 if cl is not None and (
401 if cl is not None and (
396 cl.index is not unfiindex or newkey != self._clcachekey
402 cl.index is not unfiindex or newkey != self._clcachekey
397 ):
403 ):
398 cl = None
404 cl = None
399 # could have been made None by the previous if
405 # could have been made None by the previous if
400 if cl is None:
406 if cl is None:
401 cl = wrapchangelog(unfichangelog, revs)
407 cl = wrapchangelog(unfichangelog, revs)
402 object.__setattr__(self, r'_clcache', cl)
408 object.__setattr__(self, r'_clcache', cl)
403 object.__setattr__(self, r'_clcachekey', newkey)
409 object.__setattr__(self, r'_clcachekey', newkey)
404 return cl
410 return cl
405
411
406 def unfiltered(self):
412 def unfiltered(self):
407 """Return an unfiltered version of a repo"""
413 """Return an unfiltered version of a repo"""
408 return self._unfilteredrepo
414 return self._unfilteredrepo
409
415
410 def filtered(self, name, visibilityexceptions=None):
416 def filtered(self, name, visibilityexceptions=None):
411 """Return a filtered version of a repository"""
417 """Return a filtered version of a repository"""
412 if name == self.filtername and not visibilityexceptions:
418 if name == self.filtername and not visibilityexceptions:
413 return self
419 return self
414 return self.unfiltered().filtered(name, visibilityexceptions)
420 return self.unfiltered().filtered(name, visibilityexceptions)
415
421
416 def __repr__(self):
422 def __repr__(self):
417 return r'<%s:%s %r>' % (
423 return r'<%s:%s %r>' % (
418 self.__class__.__name__,
424 self.__class__.__name__,
419 pycompat.sysstr(self.filtername),
425 pycompat.sysstr(self.filtername),
420 self.unfiltered(),
426 self.unfiltered(),
421 )
427 )
422
428
423 # everything access are forwarded to the proxied repo
429 # everything access are forwarded to the proxied repo
424 def __getattr__(self, attr):
430 def __getattr__(self, attr):
425 return getattr(self._unfilteredrepo, attr)
431 return getattr(self._unfilteredrepo, attr)
426
432
427 def __setattr__(self, attr, value):
433 def __setattr__(self, attr, value):
428 return setattr(self._unfilteredrepo, attr, value)
434 return setattr(self._unfilteredrepo, attr, value)
429
435
430 def __delattr__(self, attr):
436 def __delattr__(self, attr):
431 return delattr(self._unfilteredrepo, attr)
437 return delattr(self._unfilteredrepo, attr)
432
438
433
439
434 # Python <3.4 easily leaks types via __mro__. See
440 # Python <3.4 easily leaks types via __mro__. See
435 # https://bugs.python.org/issue17950. We cache dynamically created types
441 # https://bugs.python.org/issue17950. We cache dynamically created types
436 # so they won't be leaked on every invocation of repo.filtered().
442 # so they won't be leaked on every invocation of repo.filtered().
437 _filteredrepotypes = weakref.WeakKeyDictionary()
443 _filteredrepotypes = weakref.WeakKeyDictionary()
438
444
439
445
440 def newtype(base):
446 def newtype(base):
441 """Create a new type with the repoview mixin and the given base class"""
447 """Create a new type with the repoview mixin and the given base class"""
442 if base not in _filteredrepotypes:
448 if base not in _filteredrepotypes:
443
449
444 class filteredrepo(repoview, base):
450 class filteredrepo(repoview, base):
445 pass
451 pass
446
452
447 _filteredrepotypes[base] = filteredrepo
453 _filteredrepotypes[base] = filteredrepo
448 return _filteredrepotypes[base]
454 return _filteredrepotypes[base]
General Comments 0
You need to be logged in to leave comments. Login now