repoview: move '_getdynamicblockers' next to 'hideablerevs'...
marmoute
r32426:06aa645e default
@@ -1,363 +1,363 @@
 # repoview.py - Filtered view of a localrepo object
 #
 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
 #                Logilab SA        <contact@logilab.fr>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import copy
 import hashlib
 import heapq
 import struct
 
 from .node import nullrev
 from . import (
     error,
     obsolete,
     phases,
     tags as tagsmod,
 )
 
 def hideablerevs(repo):
     """Revision candidates to be hidden
 
     This is a standalone function to allow extensions to wrap it.
 
     Because we use the set of immutable changesets as a fallback subset in
     branchmap (see mercurial.branchmap.subsettable), you cannot set "public"
     changesets as "hideable". Doing so would break multiple code assertions and
     lead to crashes."""
     return obsolete.getrevs(repo, 'obsolete')
 
+def _getdynamicblockers(repo):
+    """Non-cacheable revisions blocking hidden changesets from being filtered.
+
+    Get revisions that will block hidden changesets and are likely to change,
+    but unlikely to create hidden blockers. They won't be cached, so be careful
+    with adding additional computation."""
+
+    cl = repo.changelog
+    blockers = set()
+    blockers.update([par.rev() for par in repo[None].parents()])
+    blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
+
+    tags = {}
+    tagsmod.readlocaltags(repo.ui, repo, tags, {})
+    if tags:
+        rev, nodemap = cl.rev, cl.nodemap
+        blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
+    return blockers
+
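The docstring of hideablerevs above says it is kept standalone so extensions can wrap it and contribute their own hiding candidates (while never adding public changesets). A minimal sketch of such a wrapper, assuming a hypothetical third-party extension and a made-up 'hidden' marker stored in the changeset extras; it only illustrates the wrapping pattern via mercurial.extensions.wrapfunction, it is not code from this changeset:

# hypothetical extension: contribute extra hideable candidates
from mercurial import extensions, repoview

def _morehideable(orig, repo):
    # start from the original candidates (obsolete changesets)
    revs = set(orig(repo))
    # example only: also hide drafts carrying a hypothetical extra
    # field 'hidden' set to "1"; public changesets must never be added,
    # as the hideablerevs docstring forbids it
    for rev in repo.revs('draft() and extra("hidden", "1")'):
        revs.add(rev)
    return revs

def extsetup(ui):
    extensions.wrapfunction(repoview, 'hideablerevs', _morehideable)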
 def _getstatichidden(repo):
     """Revision to be hidden (disregarding dynamic blocker)
 
     To keep a consistent graph, we cannot hide any revisions with
     non-hidden descendants. This function computes the set of
     revisions that could be hidden while keeping the graph consistent.
 
     A second pass will be done to apply "dynamic blocker" like bookmarks or
     working directory parents.
 
     """
     assert not repo.changelog.filteredrevs
     hidden = set(hideablerevs(repo))
     if hidden:
         getphase = repo._phasecache.phase
         getparentrevs = repo.changelog.parentrevs
         # Skip heads which are public (guaranteed to not be hidden)
         heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
         heapq.heapify(heap)
         heappop = heapq.heappop
         heappush = heapq.heappush
         seen = set() # no need to init it with heads, they have no children
         while heap:
             rev = -heappop(heap)
             # All children have been processed so at that point, if no children
             # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
             blocker = rev not in hidden
             for parent in getparentrevs(rev):
                 if parent == nullrev:
                     continue
                 if blocker:
                     # If visible, ensure parent will be visible too
                     hidden.discard(parent)
                 # - Avoid adding the same revision twice
                 # - Skip nodes which are public (guaranteed to not be hidden)
                 pre = len(seen)
                 seen.add(parent)
                 if pre < len(seen) and getphase(repo, rev):
                     heappush(heap, -parent)
     return hidden
 
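The walk above pops revisions in decreasing order, so every child is processed before its parents; a visible child then rescues its parents from the hidden set, which is exactly the "no hidden revision with visible descendants" invariant from the docstring. A simplified, self-contained illustration of that idea on a toy DAG (plain dicts instead of a changelog, phases ignored); it is not the mercurial code itself:

import heapq

def toy_statichidden(parents, heads, hideable):
    """parents: {rev: (p1, p2)} with -1 meaning 'no parent'."""
    hidden = set(hideable)
    heap = [-r for r in heads]
    heapq.heapify(heap)
    seen = set()
    while heap:
        rev = -heapq.heappop(heap)
        blocker = rev not in hidden      # a visible rev blocks hiding its parents
        for parent in parents[rev]:
            if parent == -1:
                continue
            if blocker:
                hidden.discard(parent)   # keep the graph consistent
            if parent not in seen:
                seen.add(parent)
                heapq.heappush(heap, -parent)
    return hidden

# linear history 0-1-2-3 where 2 and 3 are hideable: both can be hidden
print(toy_statichidden({0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
                       heads=[3], hideable={2, 3}))   # -> {2, 3}
# only 2 is hideable: 3 stays visible, so 2 must stay visible too
print(toy_statichidden({0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
                       heads=[3], hideable={2}))      # -> set()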
-def _getdynamicblockers(repo):
-    """Non-cacheable revisions blocking hidden changesets from being filtered.
-
-    Get revisions that will block hidden changesets and are likely to change,
-    but unlikely to create hidden blockers. They won't be cached, so be careful
-    with adding additional computation."""
-
-    cl = repo.changelog
-    blockers = set()
-    blockers.update([par.rev() for par in repo[None].parents()])
-    blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
-
-    tags = {}
-    tagsmod.readlocaltags(repo.ui, repo, tags, {})
-    if tags:
-        rev, nodemap = cl.rev, cl.nodemap
-        blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
-    return blockers
-
 cacheversion = 1
 cachefile = 'cache/hidden'
 
 def cachehash(repo, hideable):
     """return sha1 hash of repository data to identify a valid cache.
 
     We calculate a sha1 of repo heads and the content of the obsstore and write
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
     h = hashlib.sha1()
     h.update(''.join(repo.heads()))
     h.update('%d' % hash(frozenset(hideable)))
     return h.digest()
 
 def _writehiddencache(cachefile, cachehash, hidden):
     """write hidden data to a cache file"""
     data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
     cachefile.write(struct.pack(">H", cacheversion))
     cachefile.write(cachehash)
     cachefile.write(data)
 
 def trywritehiddencache(repo, hideable, hidden):
     """write cache of hidden changesets to disk
 
     Will not write the cache if a wlock cannot be obtained lazily.
     The cache consists of a head of 22byte:
        2 byte version number of the cache
       20 byte sha1 to validate the cache
      n*4 byte hidden revs
     """
     wlock = fh = None
     try:
         wlock = repo.wlock(wait=False)
         # write cache to file
         newhash = cachehash(repo, hideable)
         fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
         _writehiddencache(fh, newhash, hidden)
         fh.close()
     except (IOError, OSError):
         repo.ui.debug('error writing hidden changesets cache\n')
     except error.LockHeld:
         repo.ui.debug('cannot obtain lock to write hidden changesets cache\n')
     finally:
         if wlock:
             wlock.release()
 
 def _readhiddencache(repo, cachefilename, newhash):
     hidden = fh = None
     try:
         if repo.vfs.exists(cachefile):
             fh = repo.vfs.open(cachefile, 'rb')
             version, = struct.unpack(">H", fh.read(2))
             oldhash = fh.read(20)
             if (cacheversion, oldhash) == (version, newhash):
                 # cache is valid, so we can start reading the hidden revs
                 data = fh.read()
                 count = len(data) / 4
                 hidden = frozenset(struct.unpack('>%ii' % count, data))
         return hidden
     except struct.error:
         repo.ui.debug('corrupted hidden cache\n')
         # No need to fix the content as it will get rewritten
         return None
     except (IOError, OSError):
         repo.ui.debug('cannot read hidden cache\n')
         return None
     finally:
         if fh:
             fh.close()
 
 def tryreadcache(repo, hideable):
     """read a cache if the cache exists and is valid, otherwise returns None."""
     newhash = cachehash(repo, hideable)
     return _readhiddencache(repo, cachefile, newhash)
 
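trywritehiddencache documents the on-disk layout of .hg/cache/hidden: a 2-byte big-endian version, a 20-byte SHA-1 of the validation hash, then one signed 4-byte big-endian integer per hidden revision. A small standalone sketch (hypothetical file path, no repository objects) that decodes such a file the same way _readhiddencache does:

import struct

def decode_hidden_cache(path, expectedhash=None):
    """Decode a hidden-revision cache file; return a frozenset of revs or None."""
    with open(path, 'rb') as fh:
        version, = struct.unpack('>H', fh.read(2))   # 2-byte version number
        storedhash = fh.read(20)                     # 20-byte sha1
        if version != 1:                             # matches cacheversion above
            return None
        if expectedhash is not None and storedhash != expectedhash:
            return None                              # cache is stale
        data = fh.read()
        count = len(data) // 4                       # 4 bytes per revision
        return frozenset(struct.unpack('>%di' % count, data))

# usage sketch (path is hypothetical):
# revs = decode_hidden_cache('/path/to/repo/.hg/cache/hidden')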
 def computehidden(repo):
     """compute the set of hidden revision to filter
 
     During most operation hidden should be filtered."""
     assert not repo.changelog.filteredrevs
 
     hidden = frozenset()
     hideable = hideablerevs(repo)
     if hideable:
         cl = repo.changelog
         hidden = tryreadcache(repo, hideable)
         if hidden is None:
             hidden = frozenset(_getstatichidden(repo))
             trywritehiddencache(repo, hideable, hidden)
 
         # check if we have wd parents, bookmarks or tags pointing to hidden
         # changesets and remove those.
         dynamic = hidden & _getdynamicblockers(repo)
         if dynamic:
             blocked = cl.ancestors(dynamic, inclusive=True)
             hidden = frozenset(r for r in hidden if r not in blocked)
     return hidden
 
 def computeunserved(repo):
     """compute the set of revision that should be filtered when used a server
 
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast path in simple case to avoid impact of non optimised code
     hiddens = filterrevs(repo, 'visible')
     if phases.hassecret(repo):
         cl = repo.changelog
         secret = phases.secret
         getphase = repo._phasecache.phase
         first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
         revs = cl.revs(start=first)
         secrets = set(r for r in revs if getphase(repo, r) >= secret)
         return frozenset(hiddens | secrets)
     else:
         return hiddens
 
 def computemutable(repo):
     """compute the set of revision that should be filtered when used a server
 
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast check to avoid revset call on huge repo
     if any(repo._phasecache.phaseroots[1:]):
         getphase = repo._phasecache.phase
         maymutable = filterrevs(repo, 'base')
         return frozenset(r for r in maymutable if getphase(repo, r))
     return frozenset()
 
 def computeimpactable(repo):
     """Everything impactable by mutable revision
 
     The immutable filter still have some chance to get invalidated. This will
     happen when:
 
     - you garbage collect hidden changeset,
     - public phase is moved backward,
     - something is changed in the filtering (this could be fixed)
 
     This filter out any mutable changeset and any public changeset that may be
     impacted by something happening to a mutable revision.
 
     This is achieved by filtered everything with a revision number egal or
     higher than the first mutable changeset is filtered."""
     assert not repo.changelog.filteredrevs
     cl = repo.changelog
     firstmutable = len(cl)
     for roots in repo._phasecache.phaseroots[1:]:
         if roots:
             firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
     # protect from nullrev root
     firstmutable = max(0, firstmutable)
     return frozenset(xrange(firstmutable, len(cl)))
 
 # function to compute filtered set
 #
 # When adding a new filter you MUST update the table at:
 #    mercurial.branchmap.subsettable
 # Otherwise your filter will have to recompute all its branches cache
 # from scratch (very slow).
 filtertable = {'visible': computehidden,
                'served': computeunserved,
                'immutable':  computemutable,
                'base':  computeimpactable}
 
 def filterrevs(repo, filtername):
     """returns set of filtered revision for this filter name"""
     if filtername not in repo.filteredrevcache:
         func = filtertable[filtername]
         repo.filteredrevcache[filtername] = func(repo.unfiltered())
     return repo.filteredrevcache[filtername]
 
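filterrevs is the single entry point tying a filter name to its compute function from filtertable, memoizing the result in repo.filteredrevcache; the repoview.changelog property below relies on it. A short usage sketch, assuming repo is an already-opened (unfiltered) localrepo instance; the identities shown follow from the code above rather than from any additional API:

# assumption: 'repo' is an already-opened mercurial localrepo instance
from mercurial import repoview

hidden = repoview.filterrevs(repo, 'visible')   # revs hidden from the normal view
served = repoview.filterrevs(repo, 'served')    # hidden plus secret changesets
assert hidden <= served                         # 'served' filters at least as much

# subsequent calls return the memoized set instead of recomputing it
assert repoview.filterrevs(repo, 'visible') is repo.filteredrevcache['visible']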
 class repoview(object):
     """Provide a read/write view of a repo through a filtered changelog
 
     This object is used to access a filtered version of a repository without
     altering the original repository object itself. We can not alter the
     original object for two main reasons:
     - It prevents the use of a repo with multiple filters at the same time. In
       particular when multiple threads are involved.
     - It makes scope of the filtering harder to control.
 
     This object behaves very closely to the original repository. All attribute
     operations are done on the original repository:
     - An access to `repoview.someattr` actually returns `repo.someattr`,
     - A write to `repoview.someattr` actually sets value of `repo.someattr`,
     - A deletion of `repoview.someattr` actually drops `someattr`
       from `repo.__dict__`.
 
     The only exception is the `changelog` property. It is overridden to return
     a (surface) copy of `repo.changelog` with some revisions filtered. The
     `filtername` attribute of the view control the revisions that need to be
     filtered. (the fact the changelog is copied is an implementation detail).
 
     Unlike attributes, this object intercepts all method calls. This means that
     all methods are run on the `repoview` object with the filtered `changelog`
     property. For this purpose the simple `repoview` class must be mixed with
     the actual class of the repository. This ensures that the resulting
     `repoview` object have the very same methods than the repo object. This
     leads to the property below.
 
         repoview.method() --> repo.__class__.method(repoview)
 
     The inheritance has to be done dynamically because `repo` can be of any
     subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
     """
 
     def __init__(self, repo, filtername):
         object.__setattr__(self, r'_unfilteredrepo', repo)
         object.__setattr__(self, r'filtername', filtername)
         object.__setattr__(self, r'_clcachekey', None)
         object.__setattr__(self, r'_clcache', None)
 
     # not a propertycache on purpose we shall implement a proper cache later
     @property
     def changelog(self):
         """return a filtered version of the changeset
 
         this changelog must not be used for writing"""
         # some cache may be implemented later
         unfi = self._unfilteredrepo
         unfichangelog = unfi.changelog
         # bypass call to changelog.method
         unfiindex = unfichangelog.index
         unfilen = len(unfiindex) - 1
         unfinode = unfiindex[unfilen - 1][7]
 
         revs = filterrevs(unfi, self.filtername)
         cl = self._clcache
         newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
         # if cl.index is not unfiindex, unfi.changelog would be
         # recreated, and our clcache refers to garbage object
         if (cl is not None and
             (cl.index is not unfiindex or newkey != self._clcachekey)):
             cl = None
         # could have been made None by the previous if
         if cl is None:
             cl = copy.copy(unfichangelog)
             cl.filteredrevs = revs
             object.__setattr__(self, r'_clcache', cl)
             object.__setattr__(self, r'_clcachekey', newkey)
         return cl
 
     def unfiltered(self):
         """Return an unfiltered version of a repo"""
         return self._unfilteredrepo
 
     def filtered(self, name):
         """Return a filtered version of a repository"""
         if name == self.filtername:
             return self
         return self.unfiltered().filtered(name)
 
     # everything access are forwarded to the proxied repo
     def __getattr__(self, attr):
         return getattr(self._unfilteredrepo, attr)
 
     def __setattr__(self, attr, value):
         return setattr(self._unfilteredrepo, attr, value)
 
     def __delattr__(self, attr):
         return delattr(self._unfilteredrepo, attr)
 
     # The `requirements` attribute is initialized during __init__. But
     # __getattr__ won't be called as it also exists on the class. We need
     # explicit forwarding to main repo here
     @property
     def requirements(self):
         return self._unfilteredrepo.requirements
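The class docstring explains that repoview is mixed dynamically with the concrete repository class so that inherited methods resolve against the filtered changelog. A generic, self-contained sketch of that dynamic-mixin proxy pattern using toy classes; it illustrates the mechanism only and is not the actual mercurial construction code:

# toy illustration of the dynamic-mixin proxy pattern described above
class BaseRepo(object):
    def tip(self):
        return max(self.revs())          # uses self.revs(), whatever 'self' is
    def revs(self):
        return set(range(10))

class View(object):
    """Proxy that overrides revs() and forwards everything else."""
    def __init__(self, repo, hidden):
        object.__setattr__(self, '_repo', repo)
        object.__setattr__(self, '_hidden', hidden)
    def revs(self):
        return self._repo.revs() - self._hidden
    def __getattr__(self, name):
        return getattr(self._repo, name)

def filtered(repo, hidden):
    # build 'View + actual repo class' on the fly, as repoview does,
    # so inherited methods run against the filtering view
    cls = type('filtered' + repo.__class__.__name__,
               (View, repo.__class__), {})
    return cls(repo, hidden)

repo = BaseRepo()
view = filtered(repo, hidden={8, 9})
print(repo.tip())   # 9: unfiltered
print(view.tip())   # 7: tip() resolves through the view's revs()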