##// END OF EJS Templates
repoview: move changelog.headrevs() override to filteredchangelog...
Martin von Zweigbergk -
r43752:476754ed default
parent child Browse files
Show More
@@ -1,698 +1,663 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
19 copies,
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 util,
25 )
24 )
26 from .utils import (
25 from .utils import (
27 dateutil,
26 dateutil,
28 stringutil,
27 stringutil,
29 )
28 )
30
29
31 from .revlogutils import sidedata as sidedatamod
30 from .revlogutils import sidedata as sidedatamod
32
31
33 _defaultextra = {b'branch': b'default'}
32 _defaultextra = {b'branch': b'default'}
34
33
35
34
36 def _string_escape(text):
35 def _string_escape(text):
37 """
36 """
38 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
39 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
40 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
41 >>> s
40 >>> s
42 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
43 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
44 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
45 True
44 True
46 """
45 """
47 # subset of the string_escape codec
46 # subset of the string_escape codec
48 text = (
47 text = (
49 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
50 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
51 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
52 )
51 )
53 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
54
53
55
54
def _string_unescape(text):
    """Reverse the transformation applied by ``_string_escape``.

    ``\\0`` needs special handling before delegating to
    ``stringutil.unescapestr`` (which does not know about it).
    """
    if b'\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag every
        # escaped backslash with a sentinel newline so the \0
        # substitution cannot match the tail of a \\ sequence, then
        # strip the sentinels again
        text = text.replace(b'\\\\', b'\\\\\n')
        text = text.replace(b'\\0', b'\0')
        text = text.replace(b'\n', b'')
    return stringutil.unescapestr(text)
63
62
64
63
def decodeextra(text):
    """Decode an encoded "extra" blob back into a dict.

    The result always starts from the default extra (b'branch' entry)
    unless the input overrides it.

    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    extra = _defaultextra.copy()
    for chunk in text.split(b'\0'):
        # empty chunks carry no key/value pair
        if not chunk:
            continue
        key, value = _string_unescape(chunk).split(b':', 1)
        extra[key] = value
    return extra
82
81
83
82
def encodeextra(d):
    """Encode an extra dict into the \\0-separated changelog format.

    Keys must be sorted so the same dict always produces the same bytes,
    keeping changelog entries deterministic.
    """
    return b"\0".join(
        _string_escape(b'%s:%s' % (k, pycompat.bytestr(d[k])))
        for k in sorted(d)
    )
91
90
92
91
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    # rstrip each line individually, then drop blank lines at both ends
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return b'\n'.join(cleaned).strip(b'\n')
96
95
97
96
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''

    def __init__(self, vfs, name, mode, buf):
        # pending writes accumulate in 'buf' instead of hitting the disk
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        self.offset = fp.tell()
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end of file: on-disk size plus everything buffered
        return self._end

    def tell(self):
        return self.offset

    def flush(self):
        # nothing to flush: buffered data is written out on finalize
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        # only move the real file pointer while still inside the real file
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = b""
        if self.offset < self.size:
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # the remainder comes out of the in-memory buffer; collapse
            # it into a single chunk so it can be sliced directly
            doff = self.offset - self.size
            self.data.insert(0, b"".join(self.data))
            del self.data[1:]
            s = self.data[0][doff : doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
166
165
167
166
168 def _divertopener(opener, target):
167 def _divertopener(opener, target):
169 """build an opener that writes in 'target.a' instead of 'target'"""
168 """build an opener that writes in 'target.a' instead of 'target'"""
170
169
171 def _divert(name, mode=b'r', checkambig=False):
170 def _divert(name, mode=b'r', checkambig=False):
172 if name != target:
171 if name != target:
173 return opener(name, mode)
172 return opener(name, mode)
174 return opener(name + b".a", mode)
173 return opener(name + b".a", mode)
175
174
176 return _divert
175 return _divert
177
176
178
177
179 def _delayopener(opener, target, buf):
178 def _delayopener(opener, target, buf):
180 """build an opener that stores chunks in 'buf' instead of 'target'"""
179 """build an opener that stores chunks in 'buf' instead of 'target'"""
181
180
182 def _delay(name, mode=b'r', checkambig=False):
181 def _delay(name, mode=b'r', checkambig=False):
183 if name != target:
182 if name != target:
184 return opener(name, mode)
183 return opener(name, mode)
185 return appender(opener, name, mode, buf)
184 return appender(opener, name, mode, buf)
186
185
187 return _delay
186 return _delay
188
187
189
188
@attr.s
class _changelogrevision(object):
    """Plain data holder mirroring ``changelogrevision``'s attributes.

    Used as the fast path for empty revision text.
    """

    # Extensions might modify _defaultextra, so let the constructor below
    # pass it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default=b'')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    filesadded = attr.ib(default=None)
    filesremoved = attr.ib(default=None)
    p1copies = attr.ib(default=None)
    p2copies = attr.ib(default=None)
    description = attr.ib(default=b'')
204
203
205
204
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    __slots__ = (
        r'_offsets',
        r'_text',
        r'_sidedata',
        r'_cpsd',
    )

    def __new__(cls, text, sidedata, cpsd):
        if not text:
            # empty revision text: fall back to a plain data holder with
            # default values
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index(b'\n')
        nl2 = text.index(b'\n', nl1 + 1)
        nl3 = text.index(b'\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1 : nl3 + 2] == b'\n':
            doublenl = nl3
        else:
            doublenl = text.index(b'\n\n', nl3 + 1)

        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text
        self._sidedata = sidedata
        self._cpsd = cpsd

        return self

    @property
    def manifest(self):
        return bin(self._text[0 : self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1 : off[1]])

    @property
    def _rawdate(self):
        # first two space-separated fields of the date line: time and tz
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        return dateextra.split(b' ', 2)[0:2]

    @property
    def _rawextra(self):
        # optional third field of the date line; None when absent
        off = self._offsets
        dateextra = self._text[off[1] + 1 : off[2]]
        fields = dateextra.split(b' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        # equal offsets mean the files section is empty
        if off[2] == off[3]:
            return []

        return self._text[off[2] + 1 : off[3]].split(b'\n')

    @property
    def filesadded(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESADDED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesadded')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def filesremoved(self):
        if self._cpsd:
            rawindices = self._sidedata.get(sidedatamod.SD_FILESREMOVED)
            if not rawindices:
                return []
        else:
            rawindices = self.extra.get(b'filesremoved')
        if rawindices is None:
            return None
        return copies.decodefileindices(self.files, rawindices)

    @property
    def p1copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P1COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p1copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def p2copies(self):
        if self._cpsd:
            rawcopies = self._sidedata.get(sidedatamod.SD_P2COPIES)
            if not rawcopies:
                return {}
        else:
            rawcopies = self.extra.get(b'p2copies')
        if rawcopies is None:
            return None
        return copies.decodecopies(self.files, rawcopies)

    @property
    def description(self):
        # +2 skips the double newline separating files from the description
        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
362
361
363
362
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists(b'00changelog.i.a'):
            indexfile = b'00changelog.i.a'
        else:
            indexfile = b'00changelog.i'

        datafile = b'00changelog.d'
        revlog.revlog.__init__(
            self,
            opener,
            indexfile,
            datafile=datafile,
            checkambig=True,
            mmaplargeindex=True,
        )

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        # revisions hidden from this view; empty by default
        self.filteredrevs = frozenset()
        self._copiesstorage = opener.options.get(b'copies-storage')
406
408 def _checknofilteredinrevs(self, revs):
409 """raise the appropriate error if 'revs' contains a filtered revision
410
411 This returns a version of 'revs' to be used thereafter by the caller.
412 In particular, if revs is an iterator, it is converted into a set.
413 """
414 safehasattr = util.safehasattr
415 if safehasattr(revs, '__next__'):
416 # Note that inspect.isgenerator() is not true for iterators,
417 revs = set(revs)
418
419 filteredrevs = self.filteredrevs
420 if safehasattr(revs, 'first'): # smartset
421 offenders = revs & filteredrevs
422 else:
423 offenders = filteredrevs.intersection(revs)
424
425 for rev in offenders:
426 raise error.FilteredIndexError(rev)
427 return revs
428
429 def headrevs(self, revs=None):
430 if revs is None and self.filteredrevs:
431 try:
432 return self.index.headrevsfiltered(self.filteredrevs)
433 # AttributeError covers non-c-extension environments and
434 # old c extensions without filter handling.
435 except AttributeError:
436 return self._headrevs()
437
438 if self.filteredrevs:
439 revs = self._checknofilteredinrevs(revs)
440 return super(changelog, self).headrevs(revs)
441
442 def strip(self, *args, **kwargs):
407 def strip(self, *args, **kwargs):
443 # XXX make something better than assert
408 # XXX make something better than assert
444 # We can't expect proper strip behavior if we are filtered.
409 # We can't expect proper strip behavior if we are filtered.
445 assert not self.filteredrevs
410 assert not self.filteredrevs
446 super(changelog, self).strip(*args, **kwargs)
411 super(changelog, self).strip(*args, **kwargs)
447
412
448 def rev(self, node):
413 def rev(self, node):
449 """filtered version of revlog.rev"""
414 """filtered version of revlog.rev"""
450 r = super(changelog, self).rev(node)
415 r = super(changelog, self).rev(node)
451 if r in self.filteredrevs:
416 if r in self.filteredrevs:
452 raise error.FilteredLookupError(
417 raise error.FilteredLookupError(
453 hex(node), self.indexfile, _(b'filtered node')
418 hex(node), self.indexfile, _(b'filtered node')
454 )
419 )
455 return r
420 return r
456
421
457 def node(self, rev):
422 def node(self, rev):
458 """filtered version of revlog.node"""
423 """filtered version of revlog.node"""
459 if rev in self.filteredrevs:
424 if rev in self.filteredrevs:
460 raise error.FilteredIndexError(rev)
425 raise error.FilteredIndexError(rev)
461 return super(changelog, self).node(rev)
426 return super(changelog, self).node(rev)
462
427
463 def linkrev(self, rev):
428 def linkrev(self, rev):
464 """filtered version of revlog.linkrev"""
429 """filtered version of revlog.linkrev"""
465 if rev in self.filteredrevs:
430 if rev in self.filteredrevs:
466 raise error.FilteredIndexError(rev)
431 raise error.FilteredIndexError(rev)
467 return super(changelog, self).linkrev(rev)
432 return super(changelog, self).linkrev(rev)
468
433
469 def parentrevs(self, rev):
434 def parentrevs(self, rev):
470 """filtered version of revlog.parentrevs"""
435 """filtered version of revlog.parentrevs"""
471 if rev in self.filteredrevs:
436 if rev in self.filteredrevs:
472 raise error.FilteredIndexError(rev)
437 raise error.FilteredIndexError(rev)
473 return super(changelog, self).parentrevs(rev)
438 return super(changelog, self).parentrevs(rev)
474
439
475 def flags(self, rev):
440 def flags(self, rev):
476 """filtered version of revlog.flags"""
441 """filtered version of revlog.flags"""
477 if rev in self.filteredrevs:
442 if rev in self.filteredrevs:
478 raise error.FilteredIndexError(rev)
443 raise error.FilteredIndexError(rev)
479 return super(changelog, self).flags(rev)
444 return super(changelog, self).flags(rev)
480
445
481 def delayupdate(self, tr):
446 def delayupdate(self, tr):
482 b"delay visibility of index updates to other readers"
447 b"delay visibility of index updates to other readers"
483
448
484 if not self._delayed:
449 if not self._delayed:
485 if len(self) == 0:
450 if len(self) == 0:
486 self._divert = True
451 self._divert = True
487 if self._realopener.exists(self.indexfile + b'.a'):
452 if self._realopener.exists(self.indexfile + b'.a'):
488 self._realopener.unlink(self.indexfile + b'.a')
453 self._realopener.unlink(self.indexfile + b'.a')
489 self.opener = _divertopener(self._realopener, self.indexfile)
454 self.opener = _divertopener(self._realopener, self.indexfile)
490 else:
455 else:
491 self._delaybuf = []
456 self._delaybuf = []
492 self.opener = _delayopener(
457 self.opener = _delayopener(
493 self._realopener, self.indexfile, self._delaybuf
458 self._realopener, self.indexfile, self._delaybuf
494 )
459 )
495 self._delayed = True
460 self._delayed = True
496 tr.addpending(b'cl-%i' % id(self), self._writepending)
461 tr.addpending(b'cl-%i' % id(self), self._writepending)
497 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
462 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
498
463
499 def _finalize(self, tr):
464 def _finalize(self, tr):
500 b"finalize index updates"
465 b"finalize index updates"
501 self._delayed = False
466 self._delayed = False
502 self.opener = self._realopener
467 self.opener = self._realopener
503 # move redirected index data back into place
468 # move redirected index data back into place
504 if self._divert:
469 if self._divert:
505 assert not self._delaybuf
470 assert not self._delaybuf
506 tmpname = self.indexfile + b".a"
471 tmpname = self.indexfile + b".a"
507 nfile = self.opener.open(tmpname)
472 nfile = self.opener.open(tmpname)
508 nfile.close()
473 nfile.close()
509 self.opener.rename(tmpname, self.indexfile, checkambig=True)
474 self.opener.rename(tmpname, self.indexfile, checkambig=True)
510 elif self._delaybuf:
475 elif self._delaybuf:
511 fp = self.opener(self.indexfile, b'a', checkambig=True)
476 fp = self.opener(self.indexfile, b'a', checkambig=True)
512 fp.write(b"".join(self._delaybuf))
477 fp.write(b"".join(self._delaybuf))
513 fp.close()
478 fp.close()
514 self._delaybuf = None
479 self._delaybuf = None
515 self._divert = False
480 self._divert = False
516 # split when we're done
481 # split when we're done
517 self._enforceinlinesize(tr)
482 self._enforceinlinesize(tr)
518
483
519 def _writepending(self, tr):
484 def _writepending(self, tr):
520 b"create a file containing the unfinalized state for pretxnchangegroup"
485 b"create a file containing the unfinalized state for pretxnchangegroup"
521 if self._delaybuf:
486 if self._delaybuf:
522 # make a temporary copy of the index
487 # make a temporary copy of the index
523 fp1 = self._realopener(self.indexfile)
488 fp1 = self._realopener(self.indexfile)
524 pendingfilename = self.indexfile + b".a"
489 pendingfilename = self.indexfile + b".a"
525 # register as a temp file to ensure cleanup on failure
490 # register as a temp file to ensure cleanup on failure
526 tr.registertmp(pendingfilename)
491 tr.registertmp(pendingfilename)
527 # write existing data
492 # write existing data
528 fp2 = self._realopener(pendingfilename, b"w")
493 fp2 = self._realopener(pendingfilename, b"w")
529 fp2.write(fp1.read())
494 fp2.write(fp1.read())
530 # add pending data
495 # add pending data
531 fp2.write(b"".join(self._delaybuf))
496 fp2.write(b"".join(self._delaybuf))
532 fp2.close()
497 fp2.close()
533 # switch modes so finalize can simply rename
498 # switch modes so finalize can simply rename
534 self._delaybuf = None
499 self._delaybuf = None
535 self._divert = True
500 self._divert = True
536 self.opener = _divertopener(self._realopener, self.indexfile)
501 self.opener = _divertopener(self._realopener, self.indexfile)
537
502
538 if self._divert:
503 if self._divert:
539 return True
504 return True
540
505
541 return False
506 return False
542
507
543 def _enforceinlinesize(self, tr, fp=None):
508 def _enforceinlinesize(self, tr, fp=None):
544 if not self._delayed:
509 if not self._delayed:
545 revlog.revlog._enforceinlinesize(self, tr, fp)
510 revlog.revlog._enforceinlinesize(self, tr, fp)
546
511
547 def read(self, node):
512 def read(self, node):
548 """Obtain data from a parsed changelog revision.
513 """Obtain data from a parsed changelog revision.
549
514
550 Returns a 6-tuple of:
515 Returns a 6-tuple of:
551
516
552 - manifest node in binary
517 - manifest node in binary
553 - author/user as a localstr
518 - author/user as a localstr
554 - date as a 2-tuple of (time, timezone)
519 - date as a 2-tuple of (time, timezone)
555 - list of files
520 - list of files
556 - commit message as a localstr
521 - commit message as a localstr
557 - dict of extra metadata
522 - dict of extra metadata
558
523
559 Unless you need to access all fields, consider calling
524 Unless you need to access all fields, consider calling
560 ``changelogrevision`` instead, as it is faster for partial object
525 ``changelogrevision`` instead, as it is faster for partial object
561 access.
526 access.
562 """
527 """
563 d, s = self._revisiondata(node)
528 d, s = self._revisiondata(node)
564 c = changelogrevision(
529 c = changelogrevision(
565 d, s, self._copiesstorage == b'changeset-sidedata'
530 d, s, self._copiesstorage == b'changeset-sidedata'
566 )
531 )
567 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
532 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
568
533
569 def changelogrevision(self, nodeorrev):
534 def changelogrevision(self, nodeorrev):
570 """Obtain a ``changelogrevision`` for a node or revision."""
535 """Obtain a ``changelogrevision`` for a node or revision."""
571 text, sidedata = self._revisiondata(nodeorrev)
536 text, sidedata = self._revisiondata(nodeorrev)
572 return changelogrevision(
537 return changelogrevision(
573 text, sidedata, self._copiesstorage == b'changeset-sidedata'
538 text, sidedata, self._copiesstorage == b'changeset-sidedata'
574 )
539 )
575
540
576 def readfiles(self, node):
541 def readfiles(self, node):
577 """
542 """
578 short version of read that only returns the files modified by the cset
543 short version of read that only returns the files modified by the cset
579 """
544 """
580 text = self.revision(node)
545 text = self.revision(node)
581 if not text:
546 if not text:
582 return []
547 return []
583 last = text.index(b"\n\n")
548 last = text.index(b"\n\n")
584 l = text[:last].split(b'\n')
549 l = text[:last].split(b'\n')
585 return l[3:]
550 return l[3:]
586
551
587 def add(
552 def add(
588 self,
553 self,
589 manifest,
554 manifest,
590 files,
555 files,
591 desc,
556 desc,
592 transaction,
557 transaction,
593 p1,
558 p1,
594 p2,
559 p2,
595 user,
560 user,
596 date=None,
561 date=None,
597 extra=None,
562 extra=None,
598 p1copies=None,
563 p1copies=None,
599 p2copies=None,
564 p2copies=None,
600 filesadded=None,
565 filesadded=None,
601 filesremoved=None,
566 filesremoved=None,
602 ):
567 ):
603 # Convert to UTF-8 encoded bytestrings as the very first
568 # Convert to UTF-8 encoded bytestrings as the very first
604 # thing: calling any method on a localstr object will turn it
569 # thing: calling any method on a localstr object will turn it
605 # into a str object and the cached UTF-8 string is thus lost.
570 # into a str object and the cached UTF-8 string is thus lost.
606 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
571 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
607
572
608 user = user.strip()
573 user = user.strip()
609 # An empty username or a username with a "\n" will make the
574 # An empty username or a username with a "\n" will make the
610 # revision text contain two "\n\n" sequences -> corrupt
575 # revision text contain two "\n\n" sequences -> corrupt
611 # repository since read cannot unpack the revision.
576 # repository since read cannot unpack the revision.
612 if not user:
577 if not user:
613 raise error.StorageError(_(b"empty username"))
578 raise error.StorageError(_(b"empty username"))
614 if b"\n" in user:
579 if b"\n" in user:
615 raise error.StorageError(
580 raise error.StorageError(
616 _(b"username %r contains a newline") % pycompat.bytestr(user)
581 _(b"username %r contains a newline") % pycompat.bytestr(user)
617 )
582 )
618
583
619 desc = stripdesc(desc)
584 desc = stripdesc(desc)
620
585
621 if date:
586 if date:
622 parseddate = b"%d %d" % dateutil.parsedate(date)
587 parseddate = b"%d %d" % dateutil.parsedate(date)
623 else:
588 else:
624 parseddate = b"%d %d" % dateutil.makedate()
589 parseddate = b"%d %d" % dateutil.makedate()
625 if extra:
590 if extra:
626 branch = extra.get(b"branch")
591 branch = extra.get(b"branch")
627 if branch in (b"default", b""):
592 if branch in (b"default", b""):
628 del extra[b"branch"]
593 del extra[b"branch"]
629 elif branch in (b".", b"null", b"tip"):
594 elif branch in (b".", b"null", b"tip"):
630 raise error.StorageError(
595 raise error.StorageError(
631 _(b'the name \'%s\' is reserved') % branch
596 _(b'the name \'%s\' is reserved') % branch
632 )
597 )
633 sortedfiles = sorted(files)
598 sortedfiles = sorted(files)
634 sidedata = None
599 sidedata = None
635 if extra is not None:
600 if extra is not None:
636 for name in (
601 for name in (
637 b'p1copies',
602 b'p1copies',
638 b'p2copies',
603 b'p2copies',
639 b'filesadded',
604 b'filesadded',
640 b'filesremoved',
605 b'filesremoved',
641 ):
606 ):
642 extra.pop(name, None)
607 extra.pop(name, None)
643 if p1copies is not None:
608 if p1copies is not None:
644 p1copies = copies.encodecopies(sortedfiles, p1copies)
609 p1copies = copies.encodecopies(sortedfiles, p1copies)
645 if p2copies is not None:
610 if p2copies is not None:
646 p2copies = copies.encodecopies(sortedfiles, p2copies)
611 p2copies = copies.encodecopies(sortedfiles, p2copies)
647 if filesadded is not None:
612 if filesadded is not None:
648 filesadded = copies.encodefileindices(sortedfiles, filesadded)
613 filesadded = copies.encodefileindices(sortedfiles, filesadded)
649 if filesremoved is not None:
614 if filesremoved is not None:
650 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
615 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
651 if self._copiesstorage == b'extra':
616 if self._copiesstorage == b'extra':
652 extrasentries = p1copies, p2copies, filesadded, filesremoved
617 extrasentries = p1copies, p2copies, filesadded, filesremoved
653 if extra is None and any(x is not None for x in extrasentries):
618 if extra is None and any(x is not None for x in extrasentries):
654 extra = {}
619 extra = {}
655 if p1copies is not None:
620 if p1copies is not None:
656 extra[b'p1copies'] = p1copies
621 extra[b'p1copies'] = p1copies
657 if p2copies is not None:
622 if p2copies is not None:
658 extra[b'p2copies'] = p2copies
623 extra[b'p2copies'] = p2copies
659 if filesadded is not None:
624 if filesadded is not None:
660 extra[b'filesadded'] = filesadded
625 extra[b'filesadded'] = filesadded
661 if filesremoved is not None:
626 if filesremoved is not None:
662 extra[b'filesremoved'] = filesremoved
627 extra[b'filesremoved'] = filesremoved
663 elif self._copiesstorage == b'changeset-sidedata':
628 elif self._copiesstorage == b'changeset-sidedata':
664 sidedata = {}
629 sidedata = {}
665 if p1copies:
630 if p1copies:
666 sidedata[sidedatamod.SD_P1COPIES] = p1copies
631 sidedata[sidedatamod.SD_P1COPIES] = p1copies
667 if p2copies:
632 if p2copies:
668 sidedata[sidedatamod.SD_P2COPIES] = p2copies
633 sidedata[sidedatamod.SD_P2COPIES] = p2copies
669 if filesadded:
634 if filesadded:
670 sidedata[sidedatamod.SD_FILESADDED] = filesadded
635 sidedata[sidedatamod.SD_FILESADDED] = filesadded
671 if filesremoved:
636 if filesremoved:
672 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
637 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
673 if not sidedata:
638 if not sidedata:
674 sidedata = None
639 sidedata = None
675
640
676 if extra:
641 if extra:
677 extra = encodeextra(extra)
642 extra = encodeextra(extra)
678 parseddate = b"%s %s" % (parseddate, extra)
643 parseddate = b"%s %s" % (parseddate, extra)
679 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
644 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
680 text = b"\n".join(l)
645 text = b"\n".join(l)
681 return self.addrevision(
646 return self.addrevision(
682 text, transaction, len(self), p1, p2, sidedata=sidedata
647 text, transaction, len(self), p1, p2, sidedata=sidedata
683 )
648 )
684
649
685 def branchinfo(self, rev):
650 def branchinfo(self, rev):
686 """return the branch name and open/close state of a revision
651 """return the branch name and open/close state of a revision
687
652
688 This function exists because creating a changectx object
653 This function exists because creating a changectx object
689 just to access this is costly."""
654 just to access this is costly."""
690 extra = self.read(rev)[5]
655 extra = self.read(rev)[5]
691 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
656 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
692
657
693 def _nodeduplicatecallback(self, transaction, node):
658 def _nodeduplicatecallback(self, transaction, node):
694 # keep track of revisions that got "re-added", eg: unbunde of know rev.
659 # keep track of revisions that got "re-added", eg: unbunde of know rev.
695 #
660 #
696 # We track them in a list to preserve their order from the source bundle
661 # We track them in a list to preserve their order from the source bundle
697 duplicates = transaction.changes.setdefault(b'revduplicates', [])
662 duplicates = transaction.changes.setdefault(b'revduplicates', [])
698 duplicates.append(self.rev(node))
663 duplicates.append(self.rev(node))
@@ -1,376 +1,411 b''
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import weakref
12 import weakref
13
13
14 from .node import nullrev
14 from .node import nullrev
15 from .pycompat import (
15 from .pycompat import (
16 delattr,
16 delattr,
17 getattr,
17 getattr,
18 setattr,
18 setattr,
19 )
19 )
20 from . import (
20 from . import (
21 error,
21 obsolete,
22 obsolete,
22 phases,
23 phases,
23 pycompat,
24 pycompat,
24 revlog,
25 revlog,
25 tags as tagsmod,
26 tags as tagsmod,
26 util,
27 util,
27 )
28 )
28 from .utils import repoviewutil
29 from .utils import repoviewutil
29
30
30
31
31 def hideablerevs(repo):
32 def hideablerevs(repo):
32 """Revision candidates to be hidden
33 """Revision candidates to be hidden
33
34
34 This is a standalone function to allow extensions to wrap it.
35 This is a standalone function to allow extensions to wrap it.
35
36
36 Because we use the set of immutable changesets as a fallback subset in
37 Because we use the set of immutable changesets as a fallback subset in
37 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
38 branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
38 "public" changesets as "hideable". Doing so would break multiple code
39 "public" changesets as "hideable". Doing so would break multiple code
39 assertions and lead to crashes."""
40 assertions and lead to crashes."""
40 obsoletes = obsolete.getrevs(repo, b'obsolete')
41 obsoletes = obsolete.getrevs(repo, b'obsolete')
41 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
42 internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
42 internals = frozenset(internals)
43 internals = frozenset(internals)
43 return obsoletes | internals
44 return obsoletes | internals
44
45
45
46
46 def pinnedrevs(repo):
47 def pinnedrevs(repo):
47 """revisions blocking hidden changesets from being filtered
48 """revisions blocking hidden changesets from being filtered
48 """
49 """
49
50
50 cl = repo.changelog
51 cl = repo.changelog
51 pinned = set()
52 pinned = set()
52 pinned.update([par.rev() for par in repo[None].parents()])
53 pinned.update([par.rev() for par in repo[None].parents()])
53 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
54 pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
54
55
55 tags = {}
56 tags = {}
56 tagsmod.readlocaltags(repo.ui, repo, tags, {})
57 tagsmod.readlocaltags(repo.ui, repo, tags, {})
57 if tags:
58 if tags:
58 rev, nodemap = cl.rev, cl.nodemap
59 rev, nodemap = cl.rev, cl.nodemap
59 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
60 pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
60 return pinned
61 return pinned
61
62
62
63
63 def _revealancestors(pfunc, hidden, revs):
64 def _revealancestors(pfunc, hidden, revs):
64 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
65 """reveals contiguous chains of hidden ancestors of 'revs' by removing them
65 from 'hidden'
66 from 'hidden'
66
67
67 - pfunc(r): a funtion returning parent of 'r',
68 - pfunc(r): a funtion returning parent of 'r',
68 - hidden: the (preliminary) hidden revisions, to be updated
69 - hidden: the (preliminary) hidden revisions, to be updated
69 - revs: iterable of revnum,
70 - revs: iterable of revnum,
70
71
71 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
72 (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
72 *not* revealed)
73 *not* revealed)
73 """
74 """
74 stack = list(revs)
75 stack = list(revs)
75 while stack:
76 while stack:
76 for p in pfunc(stack.pop()):
77 for p in pfunc(stack.pop()):
77 if p != nullrev and p in hidden:
78 if p != nullrev and p in hidden:
78 hidden.remove(p)
79 hidden.remove(p)
79 stack.append(p)
80 stack.append(p)
80
81
81
82
82 def computehidden(repo, visibilityexceptions=None):
83 def computehidden(repo, visibilityexceptions=None):
83 """compute the set of hidden revision to filter
84 """compute the set of hidden revision to filter
84
85
85 During most operation hidden should be filtered."""
86 During most operation hidden should be filtered."""
86 assert not repo.changelog.filteredrevs
87 assert not repo.changelog.filteredrevs
87
88
88 hidden = hideablerevs(repo)
89 hidden = hideablerevs(repo)
89 if hidden:
90 if hidden:
90 hidden = set(hidden - pinnedrevs(repo))
91 hidden = set(hidden - pinnedrevs(repo))
91 if visibilityexceptions:
92 if visibilityexceptions:
92 hidden -= visibilityexceptions
93 hidden -= visibilityexceptions
93 pfunc = repo.changelog.parentrevs
94 pfunc = repo.changelog.parentrevs
94 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
95 mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
95
96
96 visible = mutable - hidden
97 visible = mutable - hidden
97 _revealancestors(pfunc, hidden, visible)
98 _revealancestors(pfunc, hidden, visible)
98 return frozenset(hidden)
99 return frozenset(hidden)
99
100
100
101
101 def computesecret(repo, visibilityexceptions=None):
102 def computesecret(repo, visibilityexceptions=None):
102 """compute the set of revision that can never be exposed through hgweb
103 """compute the set of revision that can never be exposed through hgweb
103
104
104 Changeset in the secret phase (or above) should stay unaccessible."""
105 Changeset in the secret phase (or above) should stay unaccessible."""
105 assert not repo.changelog.filteredrevs
106 assert not repo.changelog.filteredrevs
106 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
107 secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
107 return frozenset(secrets)
108 return frozenset(secrets)
108
109
109
110
110 def computeunserved(repo, visibilityexceptions=None):
111 def computeunserved(repo, visibilityexceptions=None):
111 """compute the set of revision that should be filtered when used a server
112 """compute the set of revision that should be filtered when used a server
112
113
113 Secret and hidden changeset should not pretend to be here."""
114 Secret and hidden changeset should not pretend to be here."""
114 assert not repo.changelog.filteredrevs
115 assert not repo.changelog.filteredrevs
115 # fast path in simple case to avoid impact of non optimised code
116 # fast path in simple case to avoid impact of non optimised code
116 hiddens = filterrevs(repo, b'visible')
117 hiddens = filterrevs(repo, b'visible')
117 secrets = filterrevs(repo, b'served.hidden')
118 secrets = filterrevs(repo, b'served.hidden')
118 if secrets:
119 if secrets:
119 return frozenset(hiddens | secrets)
120 return frozenset(hiddens | secrets)
120 else:
121 else:
121 return hiddens
122 return hiddens
122
123
123
124
124 def computemutable(repo, visibilityexceptions=None):
125 def computemutable(repo, visibilityexceptions=None):
125 assert not repo.changelog.filteredrevs
126 assert not repo.changelog.filteredrevs
126 # fast check to avoid revset call on huge repo
127 # fast check to avoid revset call on huge repo
127 if any(repo._phasecache.phaseroots[1:]):
128 if any(repo._phasecache.phaseroots[1:]):
128 getphase = repo._phasecache.phase
129 getphase = repo._phasecache.phase
129 maymutable = filterrevs(repo, b'base')
130 maymutable = filterrevs(repo, b'base')
130 return frozenset(r for r in maymutable if getphase(repo, r))
131 return frozenset(r for r in maymutable if getphase(repo, r))
131 return frozenset()
132 return frozenset()
132
133
133
134
134 def computeimpactable(repo, visibilityexceptions=None):
135 def computeimpactable(repo, visibilityexceptions=None):
135 """Everything impactable by mutable revision
136 """Everything impactable by mutable revision
136
137
137 The immutable filter still have some chance to get invalidated. This will
138 The immutable filter still have some chance to get invalidated. This will
138 happen when:
139 happen when:
139
140
140 - you garbage collect hidden changeset,
141 - you garbage collect hidden changeset,
141 - public phase is moved backward,
142 - public phase is moved backward,
142 - something is changed in the filtering (this could be fixed)
143 - something is changed in the filtering (this could be fixed)
143
144
144 This filter out any mutable changeset and any public changeset that may be
145 This filter out any mutable changeset and any public changeset that may be
145 impacted by something happening to a mutable revision.
146 impacted by something happening to a mutable revision.
146
147
147 This is achieved by filtered everything with a revision number egal or
148 This is achieved by filtered everything with a revision number egal or
148 higher than the first mutable changeset is filtered."""
149 higher than the first mutable changeset is filtered."""
149 assert not repo.changelog.filteredrevs
150 assert not repo.changelog.filteredrevs
150 cl = repo.changelog
151 cl = repo.changelog
151 firstmutable = len(cl)
152 firstmutable = len(cl)
152 for roots in repo._phasecache.phaseroots[1:]:
153 for roots in repo._phasecache.phaseroots[1:]:
153 if roots:
154 if roots:
154 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
155 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
155 # protect from nullrev root
156 # protect from nullrev root
156 firstmutable = max(0, firstmutable)
157 firstmutable = max(0, firstmutable)
157 return frozenset(pycompat.xrange(firstmutable, len(cl)))
158 return frozenset(pycompat.xrange(firstmutable, len(cl)))
158
159
159
160
160 # function to compute filtered set
161 # function to compute filtered set
161 #
162 #
162 # When adding a new filter you MUST update the table at:
163 # When adding a new filter you MUST update the table at:
163 # mercurial.utils.repoviewutil.subsettable
164 # mercurial.utils.repoviewutil.subsettable
164 # Otherwise your filter will have to recompute all its branches cache
165 # Otherwise your filter will have to recompute all its branches cache
165 # from scratch (very slow).
166 # from scratch (very slow).
166 filtertable = {
167 filtertable = {
167 b'visible': computehidden,
168 b'visible': computehidden,
168 b'visible-hidden': computehidden,
169 b'visible-hidden': computehidden,
169 b'served.hidden': computesecret,
170 b'served.hidden': computesecret,
170 b'served': computeunserved,
171 b'served': computeunserved,
171 b'immutable': computemutable,
172 b'immutable': computemutable,
172 b'base': computeimpactable,
173 b'base': computeimpactable,
173 }
174 }
174
175
175 _basefiltername = list(filtertable)
176 _basefiltername = list(filtertable)
176
177
177
178
178 def extrafilter(ui):
179 def extrafilter(ui):
179 """initialize extra filter and return its id
180 """initialize extra filter and return its id
180
181
181 If extra filtering is configured, we make sure the associated filtered view
182 If extra filtering is configured, we make sure the associated filtered view
182 are declared and return the associated id.
183 are declared and return the associated id.
183 """
184 """
184 frevs = ui.config(b'experimental', b'extra-filter-revs')
185 frevs = ui.config(b'experimental', b'extra-filter-revs')
185 if frevs is None:
186 if frevs is None:
186 return None
187 return None
187
188
188 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
189 fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
189
190
190 combine = lambda fname: fname + b'%' + fid
191 combine = lambda fname: fname + b'%' + fid
191
192
192 subsettable = repoviewutil.subsettable
193 subsettable = repoviewutil.subsettable
193
194
194 if combine(b'base') not in filtertable:
195 if combine(b'base') not in filtertable:
195 for name in _basefiltername:
196 for name in _basefiltername:
196
197
197 def extrafilteredrevs(repo, *args, **kwargs):
198 def extrafilteredrevs(repo, *args, **kwargs):
198 baserevs = filtertable[name](repo, *args, **kwargs)
199 baserevs = filtertable[name](repo, *args, **kwargs)
199 extrarevs = frozenset(repo.revs(frevs))
200 extrarevs = frozenset(repo.revs(frevs))
200 return baserevs | extrarevs
201 return baserevs | extrarevs
201
202
202 filtertable[combine(name)] = extrafilteredrevs
203 filtertable[combine(name)] = extrafilteredrevs
203 if name in subsettable:
204 if name in subsettable:
204 subsettable[combine(name)] = combine(subsettable[name])
205 subsettable[combine(name)] = combine(subsettable[name])
205 return fid
206 return fid
206
207
207
208
208 def filterrevs(repo, filtername, visibilityexceptions=None):
209 def filterrevs(repo, filtername, visibilityexceptions=None):
209 """returns set of filtered revision for this filter name
210 """returns set of filtered revision for this filter name
210
211
211 visibilityexceptions is a set of revs which must are exceptions for
212 visibilityexceptions is a set of revs which must are exceptions for
212 hidden-state and must be visible. They are dynamic and hence we should not
213 hidden-state and must be visible. They are dynamic and hence we should not
213 cache it's result"""
214 cache it's result"""
214 if filtername not in repo.filteredrevcache:
215 if filtername not in repo.filteredrevcache:
215 func = filtertable[filtername]
216 func = filtertable[filtername]
216 if visibilityexceptions:
217 if visibilityexceptions:
217 return func(repo.unfiltered, visibilityexceptions)
218 return func(repo.unfiltered, visibilityexceptions)
218 repo.filteredrevcache[filtername] = func(repo.unfiltered())
219 repo.filteredrevcache[filtername] = func(repo.unfiltered())
219 return repo.filteredrevcache[filtername]
220 return repo.filteredrevcache[filtername]
220
221
221
222
222 def wrapchangelog(unfichangelog, filteredrevs):
223 def wrapchangelog(unfichangelog, filteredrevs):
223 cl = copy.copy(unfichangelog)
224 cl = copy.copy(unfichangelog)
224 cl.filteredrevs = filteredrevs
225 cl.filteredrevs = filteredrevs
225
226
226 class filteredchangelog(cl.__class__):
227 class filteredchangelog(cl.__class__):
227 def tiprev(self):
228 def tiprev(self):
228 """filtered version of revlog.tiprev"""
229 """filtered version of revlog.tiprev"""
229 for i in pycompat.xrange(len(self) - 1, -2, -1):
230 for i in pycompat.xrange(len(self) - 1, -2, -1):
230 if i not in self.filteredrevs:
231 if i not in self.filteredrevs:
231 return i
232 return i
232
233
233 def __contains__(self, rev):
234 def __contains__(self, rev):
234 """filtered version of revlog.__contains__"""
235 """filtered version of revlog.__contains__"""
235 return 0 <= rev < len(self) and rev not in self.filteredrevs
236 return 0 <= rev < len(self) and rev not in self.filteredrevs
236
237
237 def __iter__(self):
238 def __iter__(self):
238 """filtered version of revlog.__iter__"""
239 """filtered version of revlog.__iter__"""
239 if len(self.filteredrevs) == 0:
240 if len(self.filteredrevs) == 0:
240 return revlog.revlog.__iter__(self)
241 return revlog.revlog.__iter__(self)
241
242
242
243
243 def filterediter():
244 def filterediter():
244 for i in pycompat.xrange(len(self)):
245 for i in pycompat.xrange(len(self)):
245 if i not in self.filteredrevs:
246 if i not in self.filteredrevs:
246 yield i
247 yield i
247
248
248 return filterediter()
249 return filterediter()
249
250
250 def revs(self, start=0, stop=None):
251 def revs(self, start=0, stop=None):
251 """filtered version of revlog.revs"""
252 """filtered version of revlog.revs"""
252 for i in super(filteredchangelog, self).revs(start, stop):
253 for i in super(filteredchangelog, self).revs(start, stop):
253 if i not in self.filteredrevs:
254 if i not in self.filteredrevs:
254 yield i
255 yield i
255
256
257 def _checknofilteredinrevs(self, revs):
258 """raise the appropriate error if 'revs' contains a filtered revision
259
260 This returns a version of 'revs' to be used thereafter by the caller.
261 In particular, if revs is an iterator, it is converted into a set.
262 """
263 safehasattr = util.safehasattr
264 if safehasattr(revs, '__next__'):
265 # Note that inspect.isgenerator() is not true for iterators,
266 revs = set(revs)
267
268 filteredrevs = self.filteredrevs
269 if safehasattr(revs, 'first'): # smartset
270 offenders = revs & filteredrevs
271 else:
272 offenders = filteredrevs.intersection(revs)
273
274 for rev in offenders:
275 raise error.FilteredIndexError(rev)
276 return revs
277
278 def headrevs(self, revs=None):
279 if revs is None and self.filteredrevs:
280 try:
281 return self.index.headrevsfiltered(self.filteredrevs)
282 # AttributeError covers non-c-extension environments and
283 # old c extensions without filter handling.
284 except AttributeError:
285 return self._headrevs()
286
287 if self.filteredrevs:
288 revs = self._checknofilteredinrevs(revs)
289 return super(filteredchangelog, self).headrevs(revs)
290
256 cl.__class__ = filteredchangelog
291 cl.__class__ = filteredchangelog
257
292
258 return cl
293 return cl
259
294
260
295
261 class repoview(object):
296 class repoview(object):
262 """Provide a read/write view of a repo through a filtered changelog
297 """Provide a read/write view of a repo through a filtered changelog
263
298
264 This object is used to access a filtered version of a repository without
299 This object is used to access a filtered version of a repository without
265 altering the original repository object itself. We can not alter the
300 altering the original repository object itself. We can not alter the
266 original object for two main reasons:
301 original object for two main reasons:
267 - It prevents the use of a repo with multiple filters at the same time. In
302 - It prevents the use of a repo with multiple filters at the same time. In
268 particular when multiple threads are involved.
303 particular when multiple threads are involved.
269 - It makes scope of the filtering harder to control.
304 - It makes scope of the filtering harder to control.
270
305
271 This object behaves very closely to the original repository. All attribute
306 This object behaves very closely to the original repository. All attribute
272 operations are done on the original repository:
307 operations are done on the original repository:
273 - An access to `repoview.someattr` actually returns `repo.someattr`,
308 - An access to `repoview.someattr` actually returns `repo.someattr`,
274 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
309 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
275 - A deletion of `repoview.someattr` actually drops `someattr`
310 - A deletion of `repoview.someattr` actually drops `someattr`
276 from `repo.__dict__`.
311 from `repo.__dict__`.
277
312
278 The only exception is the `changelog` property. It is overridden to return
313 The only exception is the `changelog` property. It is overridden to return
279 a (surface) copy of `repo.changelog` with some revisions filtered. The
314 a (surface) copy of `repo.changelog` with some revisions filtered. The
280 `filtername` attribute of the view control the revisions that need to be
315 `filtername` attribute of the view control the revisions that need to be
281 filtered. (the fact the changelog is copied is an implementation detail).
316 filtered. (the fact the changelog is copied is an implementation detail).
282
317
283 Unlike attributes, this object intercepts all method calls. This means that
318 Unlike attributes, this object intercepts all method calls. This means that
284 all methods are run on the `repoview` object with the filtered `changelog`
319 all methods are run on the `repoview` object with the filtered `changelog`
285 property. For this purpose the simple `repoview` class must be mixed with
320 property. For this purpose the simple `repoview` class must be mixed with
286 the actual class of the repository. This ensures that the resulting
321 the actual class of the repository. This ensures that the resulting
287 `repoview` object have the very same methods than the repo object. This
322 `repoview` object have the very same methods than the repo object. This
288 leads to the property below.
323 leads to the property below.
289
324
290 repoview.method() --> repo.__class__.method(repoview)
325 repoview.method() --> repo.__class__.method(repoview)
291
326
292 The inheritance has to be done dynamically because `repo` can be of any
327 The inheritance has to be done dynamically because `repo` can be of any
293 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
328 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
294 """
329 """
295
330
296 def __init__(self, repo, filtername, visibilityexceptions=None):
331 def __init__(self, repo, filtername, visibilityexceptions=None):
297 object.__setattr__(self, r'_unfilteredrepo', repo)
332 object.__setattr__(self, r'_unfilteredrepo', repo)
298 object.__setattr__(self, r'filtername', filtername)
333 object.__setattr__(self, r'filtername', filtername)
299 object.__setattr__(self, r'_clcachekey', None)
334 object.__setattr__(self, r'_clcachekey', None)
300 object.__setattr__(self, r'_clcache', None)
335 object.__setattr__(self, r'_clcache', None)
301 # revs which are exceptions and must not be hidden
336 # revs which are exceptions and must not be hidden
302 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
337 object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
303
338
304 # not a propertycache on purpose we shall implement a proper cache later
339 # not a propertycache on purpose we shall implement a proper cache later
305 @property
340 @property
306 def changelog(self):
341 def changelog(self):
307 """return a filtered version of the changeset
342 """return a filtered version of the changeset
308
343
309 this changelog must not be used for writing"""
344 this changelog must not be used for writing"""
310 # some cache may be implemented later
345 # some cache may be implemented later
311 unfi = self._unfilteredrepo
346 unfi = self._unfilteredrepo
312 unfichangelog = unfi.changelog
347 unfichangelog = unfi.changelog
313 # bypass call to changelog.method
348 # bypass call to changelog.method
314 unfiindex = unfichangelog.index
349 unfiindex = unfichangelog.index
315 unfilen = len(unfiindex)
350 unfilen = len(unfiindex)
316 unfinode = unfiindex[unfilen - 1][7]
351 unfinode = unfiindex[unfilen - 1][7]
317 with util.timedcm('repo filter for %s', self.filtername):
352 with util.timedcm('repo filter for %s', self.filtername):
318 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
353 revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
319 cl = self._clcache
354 cl = self._clcache
320 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
355 newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
321 # if cl.index is not unfiindex, unfi.changelog would be
356 # if cl.index is not unfiindex, unfi.changelog would be
322 # recreated, and our clcache refers to garbage object
357 # recreated, and our clcache refers to garbage object
323 if cl is not None and (
358 if cl is not None and (
324 cl.index is not unfiindex or newkey != self._clcachekey
359 cl.index is not unfiindex or newkey != self._clcachekey
325 ):
360 ):
326 cl = None
361 cl = None
327 # could have been made None by the previous if
362 # could have been made None by the previous if
328 if cl is None:
363 if cl is None:
329 cl = wrapchangelog(unfichangelog, revs)
364 cl = wrapchangelog(unfichangelog, revs)
330 object.__setattr__(self, r'_clcache', cl)
365 object.__setattr__(self, r'_clcache', cl)
331 object.__setattr__(self, r'_clcachekey', newkey)
366 object.__setattr__(self, r'_clcachekey', newkey)
332 return cl
367 return cl
333
368
334 def unfiltered(self):
369 def unfiltered(self):
335 """Return an unfiltered version of a repo"""
370 """Return an unfiltered version of a repo"""
336 return self._unfilteredrepo
371 return self._unfilteredrepo
337
372
def filtered(self, name, visibilityexceptions=None):
    """Return a filtered version of a repository"""
    # Asking for the filter this view already applies (with no extra
    # visibility exceptions) is a no-op: reuse this very proxy.
    if not visibilityexceptions and name == self.filtername:
        return self
    # Otherwise, build the requested view from the unfiltered repo.
    return self.unfiltered().filtered(name, visibilityexceptions)
343
378
def __repr__(self):
    """Debug representation: ``<classname:filtername <unfiltered repr>>``."""
    clsname = self.__class__.__name__
    # filtername is stored as bytes; convert for display.
    filtername = pycompat.sysstr(self.filtername)
    return r'<%s:%s %r>' % (clsname, filtername, self.unfiltered())
350
385
351 # everything access are forwarded to the proxied repo
386 # everything access are forwarded to the proxied repo
352 def __getattr__(self, attr):
387 def __getattr__(self, attr):
353 return getattr(self._unfilteredrepo, attr)
388 return getattr(self._unfilteredrepo, attr)
354
389
def __setattr__(self, attr, value):
    # Attribute writes go straight to the wrapped repository, so every
    # filtered view observes the same mutable state.
    target = self._unfilteredrepo
    return setattr(target, attr, value)
357
392
def __delattr__(self, attr):
    # Deletions, like reads and writes, are forwarded to the real repo.
    target = self._unfilteredrepo
    return delattr(target, attr)
360
395
361
396
362 # Python <3.4 easily leaks types via __mro__. See
397 # Python <3.4 easily leaks types via __mro__. See
363 # https://bugs.python.org/issue17950. We cache dynamically created types
398 # https://bugs.python.org/issue17950. We cache dynamically created types
364 # so they won't be leaked on every invocation of repo.filtered().
399 # so they won't be leaked on every invocation of repo.filtered().
365 _filteredrepotypes = weakref.WeakKeyDictionary()
400 _filteredrepotypes = weakref.WeakKeyDictionary()
366
401
367
402
def newtype(base):
    """Create a new type with the repoview mixin and the given base class"""
    # Look the base up first; values are classes, never None, so a None
    # result means the type has not been created yet.
    cached = _filteredrepotypes.get(base)
    if cached is None:

        class filteredrepo(repoview, base):
            pass

        _filteredrepotypes[base] = filteredrepo
        cached = filteredrepo
    return cached
General Comments 0
You need to be logged in to leave comments. Login now