repoview: avoid processing the same rev twice in _getstatichidden...
Pierre-Yves David
r24620:7c6f9097 default
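
This revision changes the ancestor walk in _getstatichidden (shown in the diff below). The walk pops revisions from a heap of negated revision numbers, so every child is handled before its parents; before this change, a parent reachable from several visible children could be pushed onto the heap, and later processed, once per child. The fix records queued parents in a `seen` set and only pushes a parent the first time it turns up. A minimal standalone sketch of that pattern, with a made-up `parents` table and `is_public` predicate standing in for the changelog and phase cache (an illustration of the idea, not the repoview code itself):

import heapq

# Made-up DAG: rev -> parent revs; -1 plays the role of nullrev.
parents = {0: [-1], 1: [0], 2: [0], 3: [1, 2]}
heads = [3]

def is_public(rev):
    return False  # pretend no revision is public

heap = [-r for r in heads if not is_public(r)]
heapq.heapify(heap)
seen = set()  # heads need not be added: they have no children
while heap:
    rev = -heapq.heappop(heap)
    for parent in parents[rev]:
        if parent == -1:
            continue
        # Without the 'seen' check, rev 0 would be queued twice here,
        # once through rev 1 and once through rev 2.
        if parent not in seen and not is_public(parent):
            seen.add(parent)
            heapq.heappush(heap, -parent)

print(sorted(seen))  # [0, 1, 2] - each ancestor queued exactly once

The committed code expresses the same membership test as "pre = len(seen); seen.add(parent); if pre < len(seen) and ...", which folds the lookup and the insertion into a single set operation.
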
@@ -1,344 +1,348 @@
 # repoview.py - Filtered view of a localrepo object
 #
 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
 #                Logilab SA        <contact@logilab.fr>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 import heapq
 import copy
 import error
 import phases
 import util
 import obsolete
 import struct
 import tags as tagsmod
 from node import nullrev
 
 def hideablerevs(repo):
     """Revisions candidates to be hidden
 
     This is a standalone function to help extensions to wrap it."""
     return obsolete.getrevs(repo, 'obsolete')
 
 def _getstatichidden(repo):
     """Revision to be hidden (disregarding dynamic blocker)
 
     To keep a consistent graph, we cannot hide any revisions with
     non-hidden descendants. This function computes the set of
     revisions that could be hidden while keeping the graph consistent.
 
     A second pass will be done to apply "dynamic blocker" like bookmarks or
     working directory parents.
 
     """
     assert not repo.changelog.filteredrevs
     hidden = set(hideablerevs(repo))
     if hidden:
         getphase = repo._phasecache.phase
         getparentrevs = repo.changelog.parentrevs
         # Skip heads which are public (guaranteed to not be hidden)
         heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
         heapq.heapify(heap)
         heappop = heapq.heappop
         heappush = heapq.heappush
+        seen = set() # no need to init it with heads, they have no children
         while heap:
             rev = -heappop(heap)
             # All children have been processed so at that point, if no children
             # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
             blocker = rev not in hidden
             for parent in getparentrevs(rev):
                 if parent == nullrev:
                     continue
                 if blocker:
                     # If visible, ensure parent will be visible too
                     hidden.discard(parent)
-                # Skip nodes which are public (guaranteed to not be hidden)
-                if getphase(repo, rev):
+                # - Avoid adding the same revision twice
+                # - Skip nodes which are public (guaranteed to not be hidden)
+                pre = len(seen)
+                seen.add(parent)
+                if pre < len(seen) and getphase(repo, rev):
                     heappush(heap, -parent)
     return hidden
 
 def _getdynamicblockers(repo):
     """Non-cacheable revisions blocking hidden changesets from being filtered.
 
     Get revisions that will block hidden changesets and are likely to change,
     but unlikely to create hidden blockers. They won't be cached, so be careful
     with adding additional computation."""
 
     cl = repo.changelog
     blockers = set()
     blockers.update([par.rev() for par in repo[None].parents()])
     blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
 
     tags = {}
     tagsmod.readlocaltags(repo.ui, repo, tags, {})
     if tags:
         rev, nodemap = cl.rev, cl.nodemap
         blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
     return blockers
 
 cacheversion = 1
 cachefile = 'cache/hidden'
 
 def cachehash(repo, hideable):
     """return sha1 hash of repository data to identify a valid cache.
 
     We calculate a sha1 of repo heads and the content of the obsstore and write
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
     h = util.sha1()
     h.update(''.join(repo.heads()))
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
 
 def _writehiddencache(cachefile, cachehash, hidden):
     """write hidden data to a cache file"""
     data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
     cachefile.write(struct.pack(">H", cacheversion))
     cachefile.write(cachehash)
     cachefile.write(data)
 
 def trywritehiddencache(repo, hideable, hidden):
     """write cache of hidden changesets to disk
 
     Will not write the cache if a wlock cannot be obtained lazily.
     The cache consists of a head of 22byte:
         2 byte version number of the cache
         20 byte sha1 to validate the cache
         n*4 byte hidden revs
     """
     wlock = fh = None
     try:
         try:
             wlock = repo.wlock(wait=False)
             # write cache to file
             newhash = cachehash(repo, hideable)
             fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
             _writehiddencache(fh, newhash, hidden)
         except (IOError, OSError):
             repo.ui.debug('error writing hidden changesets cache')
         except error.LockHeld:
             repo.ui.debug('cannot obtain lock to write hidden changesets cache')
     finally:
         if fh:
             fh.close()
         if wlock:
             wlock.release()
 
 def tryreadcache(repo, hideable):
     """read a cache if the cache exists and is valid, otherwise returns None."""
     hidden = fh = None
     try:
         if repo.vfs.exists(cachefile):
             fh = repo.vfs.open(cachefile, 'rb')
             version, = struct.unpack(">H", fh.read(2))
             oldhash = fh.read(20)
             newhash = cachehash(repo, hideable)
             if (cacheversion, oldhash) == (version, newhash):
                 # cache is valid, so we can start reading the hidden revs
                 data = fh.read()
                 count = len(data) / 4
                 hidden = frozenset(struct.unpack('>%ii' % count, data))
         return hidden
     finally:
         if fh:
             fh.close()
 
 def computehidden(repo):
     """compute the set of hidden revision to filter
 
     During most operation hidden should be filtered."""
     assert not repo.changelog.filteredrevs
 
     hidden = frozenset()
     hideable = hideablerevs(repo)
     if hideable:
         cl = repo.changelog
         hidden = tryreadcache(repo, hideable)
         if hidden is None:
             hidden = frozenset(_getstatichidden(repo))
             trywritehiddencache(repo, hideable, hidden)
 
         # check if we have wd parents, bookmarks or tags pointing to hidden
         # changesets and remove those.
         dynamic = hidden & _getdynamicblockers(repo)
         if dynamic:
             blocked = cl.ancestors(dynamic, inclusive=True)
             hidden = frozenset(r for r in hidden if r not in blocked)
     return hidden
 
 def computeunserved(repo):
     """compute the set of revision that should be filtered when used a server
 
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast path in simple case to avoid impact of non optimised code
     hiddens = filterrevs(repo, 'visible')
     if phases.hassecret(repo):
         cl = repo.changelog
         secret = phases.secret
         getphase = repo._phasecache.phase
         first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
         revs = cl.revs(start=first)
         secrets = set(r for r in revs if getphase(repo, r) >= secret)
         return frozenset(hiddens | secrets)
     else:
         return hiddens
 
 def computemutable(repo):
     """compute the set of revision that should be filtered when used a server
 
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast check to avoid revset call on huge repo
     if util.any(repo._phasecache.phaseroots[1:]):
         getphase = repo._phasecache.phase
         maymutable = filterrevs(repo, 'base')
         return frozenset(r for r in maymutable if getphase(repo, r))
     return frozenset()
 
 def computeimpactable(repo):
     """Everything impactable by mutable revision
 
     The immutable filter still have some chance to get invalidated. This will
     happen when:
 
     - you garbage collect hidden changeset,
     - public phase is moved backward,
     - something is changed in the filtering (this could be fixed)
 
     This filter out any mutable changeset and any public changeset that may be
     impacted by something happening to a mutable revision.
 
     This is achieved by filtered everything with a revision number egal or
     higher than the first mutable changeset is filtered."""
     assert not repo.changelog.filteredrevs
     cl = repo.changelog
     firstmutable = len(cl)
     for roots in repo._phasecache.phaseroots[1:]:
         if roots:
             firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
     # protect from nullrev root
     firstmutable = max(0, firstmutable)
     return frozenset(xrange(firstmutable, len(cl)))
 
 # function to compute filtered set
 #
 # When adding a new filter you MUST update the table at:
 #     mercurial.branchmap.subsettable
 # Otherwise your filter will have to recompute all its branches cache
 # from scratch (very slow).
 filtertable = {'visible': computehidden,
                'served': computeunserved,
                'immutable': computemutable,
                'base': computeimpactable}
 
 def filterrevs(repo, filtername):
     """returns set of filtered revision for this filter name"""
     if filtername not in repo.filteredrevcache:
         func = filtertable[filtername]
         repo.filteredrevcache[filtername] = func(repo.unfiltered())
     return repo.filteredrevcache[filtername]
 
 class repoview(object):
     """Provide a read/write view of a repo through a filtered changelog
 
     This object is used to access a filtered version of a repository without
     altering the original repository object itself. We can not alter the
     original object for two main reasons:
     - It prevents the use of a repo with multiple filters at the same time. In
       particular when multiple threads are involved.
     - It makes scope of the filtering harder to control.
 
     This object behaves very closely to the original repository. All attribute
     operations are done on the original repository:
     - An access to `repoview.someattr` actually returns `repo.someattr`,
     - A write to `repoview.someattr` actually sets value of `repo.someattr`,
     - A deletion of `repoview.someattr` actually drops `someattr`
       from `repo.__dict__`.
 
     The only exception is the `changelog` property. It is overridden to return
     a (surface) copy of `repo.changelog` with some revisions filtered. The
     `filtername` attribute of the view control the revisions that need to be
     filtered. (the fact the changelog is copied is an implementation detail).
 
     Unlike attributes, this object intercepts all method calls. This means that
     all methods are run on the `repoview` object with the filtered `changelog`
     property. For this purpose the simple `repoview` class must be mixed with
     the actual class of the repository. This ensures that the resulting
     `repoview` object have the very same methods than the repo object. This
     leads to the property below.
 
         repoview.method() --> repo.__class__.method(repoview)
 
     The inheritance has to be done dynamically because `repo` can be of any
     subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
     """
 
     def __init__(self, repo, filtername):
         object.__setattr__(self, '_unfilteredrepo', repo)
         object.__setattr__(self, 'filtername', filtername)
         object.__setattr__(self, '_clcachekey', None)
         object.__setattr__(self, '_clcache', None)
 
     # not a propertycache on purpose we shall implement a proper cache later
     @property
     def changelog(self):
         """return a filtered version of the changeset
 
         this changelog must not be used for writing"""
         # some cache may be implemented later
         unfi = self._unfilteredrepo
         unfichangelog = unfi.changelog
         revs = filterrevs(unfi, self.filtername)
         cl = self._clcache
         newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs),
                   unfichangelog._delayed)
         if cl is not None:
             # we need to check curkey too for some obscure reason.
             # MQ test show a corruption of the underlying repo (in _clcache)
             # without change in the cachekey.
             oldfilter = cl.filteredrevs
             try:
                 cl.filteredrevs = () # disable filtering for tip
                 curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed)
             finally:
                 cl.filteredrevs = oldfilter
             if newkey != self._clcachekey or newkey != curkey:
                 cl = None
         # could have been made None by the previous if
         if cl is None:
             cl = copy.copy(unfichangelog)
             cl.filteredrevs = revs
             object.__setattr__(self, '_clcache', cl)
             object.__setattr__(self, '_clcachekey', newkey)
         return cl
 
     def unfiltered(self):
         """Return an unfiltered version of a repo"""
         return self._unfilteredrepo
 
     def filtered(self, name):
         """Return a filtered version of a repository"""
         if name == self.filtername:
             return self
         return self.unfiltered().filtered(name)
 
     # everything access are forwarded to the proxied repo
     def __getattr__(self, attr):
         return getattr(self._unfilteredrepo, attr)
 
     def __setattr__(self, attr, value):
         return setattr(self._unfilteredrepo, attr, value)
 
     def __delattr__(self, attr):
         return delattr(self._unfilteredrepo, attr)
 
     # The `requirements` attribute is initialized during __init__. But
     # __getattr__ won't be called as it also exists on the class. We need
     # explicit forwarding to main repo here
     @property
     def requirements(self):
         return self._unfilteredrepo.requirements
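
The docstring of trywritehiddencache above documents the on-disk cache layout: a 2-byte version, a 20-byte sha1, then one signed 4-byte big-endian integer per hidden revision, all produced with struct. A small round-trip sketch of that layout, independent of the repository objects in the file (the in-memory buffer and sample values are made up for illustration):

import struct
from io import BytesIO

cacheversion = 1
fakehash = b'\x00' * 20          # stands in for cachehash(repo, hideable)
hidden = {5, 7, 12}

# write: 2-byte version, 20-byte hash, then n * 4-byte big-endian revs
buf = BytesIO()
buf.write(struct.pack('>H', cacheversion))
buf.write(fakehash)
buf.write(struct.pack('>%ii' % len(hidden), *sorted(hidden)))

# read it back and validate the header before trusting the payload
buf.seek(0)
version, = struct.unpack('>H', buf.read(2))
storedhash = buf.read(20)
assert (version, storedhash) == (cacheversion, fakehash)
data = buf.read()
count = len(data) // 4
assert frozenset(struct.unpack('>%ii' % count, data)) == frozenset(hidden)

The real code additionally guards the write with a non-blocking wlock and an atomictemp file, so a failed write cannot leave a truncated cache behind.
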
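
The repoview class docstring explains that the view only works once it is combined, at runtime, with the concrete repository class, so that method lookups resolve through the view (and therefore its filtered changelog property) while plain attribute access falls through to the real repo. That combination is not part of this file; the following is a generic sketch of building such a dynamic mixin with type(), using made-up Base and View classes rather than Mercurial's actual repository code:

class Base(object):
    def describe(self):
        # runs with 'self' bound to whatever object it is called on
        return 'repo named %s' % self.name

class View(object):
    """Overrides one attribute, forwards everything else (repoview-style)."""
    def __init__(self, wrapped):
        object.__setattr__(self, '_wrapped', wrapped)

    @property
    def name(self):
        return 'filtered-' + self._wrapped.name

    def __getattr__(self, attr):
        return getattr(self._wrapped, attr)

def makeview(obj):
    # the concrete class is only known at runtime, so the mixin is built here
    cls = type('view_' + obj.__class__.__name__, (View, obj.__class__), {})
    return cls(obj)

base = Base()
base.name = 'main'
view = makeview(base)
print(view.describe())  # 'repo named filtered-main': Base.describe ran on the view
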