repoview: use a heap in _getstatichidden...
Pierre-Yves David
r24616:72d34c5a default
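The change below swaps the FIFO deque that _getstatichidden used to walk from the heads for a heap keyed on negated revision numbers: revisions are now always popped from the highest number down, and since a child always has a higher revision number than its parents, every child is examined before its parents. A minimal, self-contained sketch of that traversal pattern (illustrative only, not code from this patch):

import heapq

def walkdescending(headrevs, parentrevs, nullrev=-1):
    # pushing negated revisions turns Python's min-heap into a max-heap
    heap = [-r for r in headrevs]
    heapq.heapify(heap)
    seen = set()
    while heap:
        rev = -heapq.heappop(heap)
        if rev in seen:
            continue
        seen.add(rev)
        yield rev
        for p in parentrevs(rev):
            if p != nullrev:
                heapq.heappush(heap, -p)

# e.g. with heads [5, 2] and parents {5: [4], 4: [2], 2: [0], 0: []} this
# yields 5, 4, 2, 0; a FIFO queue would have handled revision 2 before its
# descendant 4.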
@@ -1,345 +1,349
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import collections
9 import heapq
10 import copy
10 import copy
11 import error
11 import error
12 import phases
12 import phases
13 import util
13 import util
14 import obsolete
14 import obsolete
15 import struct
15 import struct
16 import tags as tagsmod
16 import tags as tagsmod
17 from node import nullrev
17 from node import nullrev
18
18
19 def hideablerevs(repo):
19 def hideablerevs(repo):
20 """Revisions candidates to be hidden
20 """Revisions candidates to be hidden
21
21
22 This is a standalone function to help extensions to wrap it."""
22 This is a standalone function to help extensions to wrap it."""
23 return obsolete.getrevs(repo, 'obsolete')
23 return obsolete.getrevs(repo, 'obsolete')
24
24
25 def _getstatichidden(repo):
25 def _getstatichidden(repo):
26 """Revision to be hidden (disregarding dynamic blocker)
26 """Revision to be hidden (disregarding dynamic blocker)
27
27
28 To keep a consistent graph, we cannot hide any revisions with
28 To keep a consistent graph, we cannot hide any revisions with
29 non-hidden descendants. This function computes the set of
29 non-hidden descendants. This function computes the set of
30 revisions that could be hidden while keeping the graph consistent.
30 revisions that could be hidden while keeping the graph consistent.
31
31
32 A second pass will be done to apply "dynamic blockers" like bookmarks or
32 A second pass will be done to apply "dynamic blockers" like bookmarks or
33 working directory parents.
33 working directory parents.
34
34
35 """
35 """
36 assert not repo.changelog.filteredrevs
36 assert not repo.changelog.filteredrevs
37 hideable = hideablerevs(repo)
37 hideable = hideablerevs(repo)
38 if hideable:
38 if hideable:
39 actuallyhidden = {}
39 actuallyhidden = {}
40 getphase = repo._phasecache.phase
40 getphase = repo._phasecache.phase
41 getparentrevs = repo.changelog.parentrevs
41 getparentrevs = repo.changelog.parentrevs
42 queue = collections.deque((r, False) for r in repo.changelog.headrevs())
42 heap = [(-r, False) for r in repo.changelog.headrevs()]
43 while queue:
43 heapq.heapify(heap)
44 rev, blocked = queue.popleft()
44 heappop = heapq.heappop
45 heappush = heapq.heappush
46 while heap:
47 rev, blocked = heappop(heap)
48 rev = - rev
45 phase = getphase(repo, rev)
49 phase = getphase(repo, rev)
46 # Skip nodes which are public (guaranteed to not be hidden) and
50 # Skip nodes which are public (guaranteed to not be hidden) and
47 # nodes which have already been processed and won't be blocked by
51 # nodes which have already been processed and won't be blocked by
48 # the previous node.
52 # the previous node.
49 if phase == 0 or (not blocked and rev in actuallyhidden):
53 if phase == 0 or (not blocked and rev in actuallyhidden):
50 continue
54 continue
51 if rev in hideable:
55 if rev in hideable:
52 if blocked:
56 if blocked:
53 actuallyhidden[rev] = False
57 actuallyhidden[rev] = False
54 else:
58 else:
55 actuallyhidden.setdefault(rev, True)
59 actuallyhidden.setdefault(rev, True)
56 else:
60 else:
57 blocked = True
61 blocked = True
58
62
59 for parent in (p for p in getparentrevs(rev) if p != nullrev):
63 for parent in (p for p in getparentrevs(rev) if p != nullrev):
60 queue.append((parent, blocked))
64 heappush(heap, (- parent, blocked))
61 return set(rev for rev, hidden in actuallyhidden.iteritems() if hidden)
65 return set(rev for rev, hidden in actuallyhidden.iteritems() if hidden)
62
66
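# Note (editorial illustration, not from the original file): the bookkeeping
# in _getstatichidden above is deliberately asymmetric.  A blocked visit does
# a plain assignment (actuallyhidden[rev] = False), so it wins no matter when
# it runs, while an unblocked visit uses setdefault(rev, True) and therefore
# never overrides an earlier False.  If a hideable revision has one hidden
# child and one non-hideable child, its two heap entries may pop in either
# order, but it always ends up marked False, i.e. kept visible.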
63 def _getdynamicblockers(repo):
67 def _getdynamicblockers(repo):
64 """Non-cacheable revisions blocking hidden changesets from being filtered.
68 """Non-cacheable revisions blocking hidden changesets from being filtered.
65
69
66 Get revisions that will block hidden changesets and are likely to change,
70 Get revisions that will block hidden changesets and are likely to change,
67 but unlikely to create hidden blockers. They won't be cached, so be careful
71 but unlikely to create hidden blockers. They won't be cached, so be careful
68 with adding additional computation."""
72 with adding additional computation."""
69
73
70 cl = repo.changelog
74 cl = repo.changelog
71 blockers = set()
75 blockers = set()
72 blockers.update([par.rev() for par in repo[None].parents()])
76 blockers.update([par.rev() for par in repo[None].parents()])
73 blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
77 blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
74
78
75 tags = {}
79 tags = {}
76 tagsmod.readlocaltags(repo.ui, repo, tags, {})
80 tagsmod.readlocaltags(repo.ui, repo, tags, {})
77 if tags:
81 if tags:
78 rev, nodemap = cl.rev, cl.nodemap
82 rev, nodemap = cl.rev, cl.nodemap
79 blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
83 blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
80 return blockers
84 return blockers
81
85
82 cacheversion = 1
86 cacheversion = 1
83 cachefile = 'cache/hidden'
87 cachefile = 'cache/hidden'
84
88
85 def cachehash(repo, hideable):
89 def cachehash(repo, hideable):
86 """return sha1 hash of repository data to identify a valid cache.
90 """return sha1 hash of repository data to identify a valid cache.
87
91
88 We calculate a sha1 of repo heads and the content of the obsstore and write
92 We calculate a sha1 of repo heads and the content of the obsstore and write
89 it to the cache. Upon reading we can easily validate by checking the hash
93 it to the cache. Upon reading we can easily validate by checking the hash
90 against the stored one and discard the cache in case the hashes don't match.
94 against the stored one and discard the cache in case the hashes don't match.
91 """
95 """
92 h = util.sha1()
96 h = util.sha1()
93 h.update(''.join(repo.heads()))
97 h.update(''.join(repo.heads()))
94 h.update(str(hash(frozenset(hideable))))
98 h.update(str(hash(frozenset(hideable))))
95 return h.digest()
99 return h.digest()
96
100
97 def _writehiddencache(cachefile, cachehash, hidden):
101 def _writehiddencache(cachefile, cachehash, hidden):
98 """write hidden data to a cache file"""
102 """write hidden data to a cache file"""
99 data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
103 data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
100 cachefile.write(struct.pack(">H", cacheversion))
104 cachefile.write(struct.pack(">H", cacheversion))
101 cachefile.write(cachehash)
105 cachefile.write(cachehash)
102 cachefile.write(data)
106 cachefile.write(data)
103
107
104 def trywritehiddencache(repo, hideable, hidden):
108 def trywritehiddencache(repo, hideable, hidden):
105 """write cache of hidden changesets to disk
109 """write cache of hidden changesets to disk
106
110
107 Will not write the cache if a wlock cannot be obtained lazily.
111 Will not write the cache if a wlock cannot be obtained lazily.
108 The cache consists of a 22-byte header:
112 The cache consists of a 22-byte header:
109 2 byte version number of the cache
113 2 byte version number of the cache
110 20 byte sha1 to validate the cache
114 20 byte sha1 to validate the cache
111 n*4 byte hidden revs
115 n*4 byte hidden revs
112 """
116 """
113 wlock = fh = None
117 wlock = fh = None
114 try:
118 try:
115 try:
119 try:
116 wlock = repo.wlock(wait=False)
120 wlock = repo.wlock(wait=False)
117 # write cache to file
121 # write cache to file
118 newhash = cachehash(repo, hideable)
122 newhash = cachehash(repo, hideable)
119 fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
123 fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
120 _writehiddencache(fh, newhash, hidden)
124 _writehiddencache(fh, newhash, hidden)
121 except (IOError, OSError):
125 except (IOError, OSError):
122 repo.ui.debug('error writing hidden changesets cache')
126 repo.ui.debug('error writing hidden changesets cache')
123 except error.LockHeld:
127 except error.LockHeld:
124 repo.ui.debug('cannot obtain lock to write hidden changesets cache')
128 repo.ui.debug('cannot obtain lock to write hidden changesets cache')
125 finally:
129 finally:
126 if fh:
130 if fh:
127 fh.close()
131 fh.close()
128 if wlock:
132 if wlock:
129 wlock.release()
133 wlock.release()
130
134
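# Editorial illustration (hypothetical helper, not part of the original
# module): a standalone round-trip of the cache layout documented in
# trywritehiddencache above and read back by tryreadcache below -- a 2-byte
# big-endian version, a 20-byte sha1 digest, then one signed 32-bit
# big-endian integer per hidden revision.
def _cacheformatexample():
    record = struct.pack('>H', cacheversion) + '\0' * 20
    record += struct.pack('>%ii' % 3, 2, 5, 7)
    version, = struct.unpack('>H', record[:2])
    digest, body = record[2:22], record[22:]
    revs = struct.unpack('>%ii' % (len(body) / 4), body)
    return version, digest, revs  # (1, twenty NUL bytes, (2, 5, 7))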
131 def tryreadcache(repo, hideable):
135 def tryreadcache(repo, hideable):
132 """read a cache if the cache exists and is valid, otherwise returns None."""
136 """read a cache if the cache exists and is valid, otherwise returns None."""
133 hidden = fh = None
137 hidden = fh = None
134 try:
138 try:
135 if repo.vfs.exists(cachefile):
139 if repo.vfs.exists(cachefile):
136 fh = repo.vfs.open(cachefile, 'rb')
140 fh = repo.vfs.open(cachefile, 'rb')
137 version, = struct.unpack(">H", fh.read(2))
141 version, = struct.unpack(">H", fh.read(2))
138 oldhash = fh.read(20)
142 oldhash = fh.read(20)
139 newhash = cachehash(repo, hideable)
143 newhash = cachehash(repo, hideable)
140 if (cacheversion, oldhash) == (version, newhash):
144 if (cacheversion, oldhash) == (version, newhash):
141 # cache is valid, so we can start reading the hidden revs
145 # cache is valid, so we can start reading the hidden revs
142 data = fh.read()
146 data = fh.read()
143 count = len(data) / 4
147 count = len(data) / 4
144 hidden = frozenset(struct.unpack('>%ii' % count, data))
148 hidden = frozenset(struct.unpack('>%ii' % count, data))
145 return hidden
149 return hidden
146 finally:
150 finally:
147 if fh:
151 if fh:
148 fh.close()
152 fh.close()
149
153
150 def computehidden(repo):
154 def computehidden(repo):
151 """compute the set of hidden revision to filter
155 """compute the set of hidden revision to filter
152
156
153 During most operations hidden should be filtered."""
157 During most operations hidden should be filtered."""
154 assert not repo.changelog.filteredrevs
158 assert not repo.changelog.filteredrevs
155
159
156 hidden = frozenset()
160 hidden = frozenset()
157 hideable = hideablerevs(repo)
161 hideable = hideablerevs(repo)
158 if hideable:
162 if hideable:
159 cl = repo.changelog
163 cl = repo.changelog
160 hidden = tryreadcache(repo, hideable)
164 hidden = tryreadcache(repo, hideable)
161 if hidden is None:
165 if hidden is None:
162 hidden = frozenset(_getstatichidden(repo))
166 hidden = frozenset(_getstatichidden(repo))
163 trywritehiddencache(repo, hideable, hidden)
167 trywritehiddencache(repo, hideable, hidden)
164
168
165 # check if we have wd parents, bookmarks or tags pointing to hidden
169 # check if we have wd parents, bookmarks or tags pointing to hidden
166 # changesets and remove those.
170 # changesets and remove those.
167 dynamic = hidden & _getdynamicblockers(repo)
171 dynamic = hidden & _getdynamicblockers(repo)
168 if dynamic:
172 if dynamic:
169 blocked = cl.ancestors(dynamic, inclusive=True)
173 blocked = cl.ancestors(dynamic, inclusive=True)
170 hidden = frozenset(r for r in hidden if r not in blocked)
174 hidden = frozenset(r for r in hidden if r not in blocked)
171 return hidden
175 return hidden
172
176
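# Editorial illustration (not from the original file): on a linear history
# where obsolete revisions 2 and 3 are statically hidden but a bookmark still
# points to 3, _getdynamicblockers() contributes {3}, `dynamic` becomes {3},
# cl.ancestors({3}, inclusive=True) covers 2 and 3, and computehidden() ends
# up hiding neither -- every ancestor of a visible blocker stays visible.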
173 def computeunserved(repo):
177 def computeunserved(repo):
174 """compute the set of revision that should be filtered when used a server
178 """compute the set of revision that should be filtered when used a server
175
179
176 Secret and hidden changesets should not pretend to be here."""
180 Secret and hidden changesets should not pretend to be here."""
177 assert not repo.changelog.filteredrevs
181 assert not repo.changelog.filteredrevs
178 # fast path in simple cases to avoid impact of non-optimised code
182 # fast path in simple cases to avoid impact of non-optimised code
179 hiddens = filterrevs(repo, 'visible')
183 hiddens = filterrevs(repo, 'visible')
180 if phases.hassecret(repo):
184 if phases.hassecret(repo):
181 cl = repo.changelog
185 cl = repo.changelog
182 secret = phases.secret
186 secret = phases.secret
183 getphase = repo._phasecache.phase
187 getphase = repo._phasecache.phase
184 first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
188 first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
185 revs = cl.revs(start=first)
189 revs = cl.revs(start=first)
186 secrets = set(r for r in revs if getphase(repo, r) >= secret)
190 secrets = set(r for r in revs if getphase(repo, r) >= secret)
187 return frozenset(hiddens | secrets)
191 return frozenset(hiddens | secrets)
188 else:
192 else:
189 return hiddens
193 return hiddens
190
194
191 def computemutable(repo):
195 def computemutable(repo):
192 """compute the set of revision that should be filtered when used a server
196 """compute the set of revision that should be filtered when used a server
193
197
194 Mutable (non-public) changesets should not pretend to be here."""
198 Mutable (non-public) changesets should not pretend to be here."""
195 assert not repo.changelog.filteredrevs
199 assert not repo.changelog.filteredrevs
196 # fast check to avoid revset call on huge repo
200 # fast check to avoid revset call on huge repo
197 if util.any(repo._phasecache.phaseroots[1:]):
201 if util.any(repo._phasecache.phaseroots[1:]):
198 getphase = repo._phasecache.phase
202 getphase = repo._phasecache.phase
199 maymutable = filterrevs(repo, 'base')
203 maymutable = filterrevs(repo, 'base')
200 return frozenset(r for r in maymutable if getphase(repo, r))
204 return frozenset(r for r in maymutable if getphase(repo, r))
201 return frozenset()
205 return frozenset()
202
206
203 def computeimpactable(repo):
207 def computeimpactable(repo):
204 """Everything impactable by mutable revision
208 """Everything impactable by mutable revision
205
209
206 The immutable filter still has some chance of being invalidated. This will
210 The immutable filter still has some chance of being invalidated. This will
207 happen when:
211 happen when:
208
212
209 - you garbage collect hidden changesets,
213 - you garbage collect hidden changesets,
210 - public phase is moved backward,
214 - public phase is moved backward,
211 - something is changed in the filtering (this could be fixed)
215 - something is changed in the filtering (this could be fixed)
212
216
213 This filters out any mutable changeset and any public changeset that may be
217 This filters out any mutable changeset and any public changeset that may be
214 impacted by something happening to a mutable revision.
218 impacted by something happening to a mutable revision.
215
219
216 This is achieved by filtering out everything with a revision number equal
220 This is achieved by filtering out everything with a revision number equal
217 to or higher than the first mutable changeset."""
221 to or higher than the first mutable changeset."""
218 assert not repo.changelog.filteredrevs
222 assert not repo.changelog.filteredrevs
219 cl = repo.changelog
223 cl = repo.changelog
220 firstmutable = len(cl)
224 firstmutable = len(cl)
221 for roots in repo._phasecache.phaseroots[1:]:
225 for roots in repo._phasecache.phaseroots[1:]:
222 if roots:
226 if roots:
223 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
227 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
224 # protect from nullrev root
228 # protect from nullrev root
225 firstmutable = max(0, firstmutable)
229 firstmutable = max(0, firstmutable)
226 return frozenset(xrange(firstmutable, len(cl)))
230 return frozenset(xrange(firstmutable, len(cl)))
227
231
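# Editorial illustration (not from the original file): with 10 revisions in
# the changelog and the lowest-numbered non-public phase root at revision 6,
# computeimpactable() returns frozenset(xrange(6, 10)), filtering revisions
# 6 through 9 -- every mutable changeset plus any public changeset numbered
# after the first mutable one.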
228 # function to compute filtered set
232 # function to compute filtered set
229 #
233 #
230 # When adding a new filter you MUST update the table at:
234 # When adding a new filter you MUST update the table at:
231 # mercurial.branchmap.subsettable
235 # mercurial.branchmap.subsettable
232 # Otherwise your filter will have to recompute all its branches cache
236 # Otherwise your filter will have to recompute all its branches cache
233 # from scratch (very slow).
237 # from scratch (very slow).
234 filtertable = {'visible': computehidden,
238 filtertable = {'visible': computehidden,
235 'served': computeunserved,
239 'served': computeunserved,
236 'immutable': computemutable,
240 'immutable': computemutable,
237 'base': computeimpactable}
241 'base': computeimpactable}
238
242
239 def filterrevs(repo, filtername):
243 def filterrevs(repo, filtername):
240 """returns set of filtered revision for this filter name"""
244 """returns set of filtered revision for this filter name"""
241 if filtername not in repo.filteredrevcache:
245 if filtername not in repo.filteredrevcache:
242 func = filtertable[filtername]
246 func = filtertable[filtername]
243 repo.filteredrevcache[filtername] = func(repo.unfiltered())
247 repo.filteredrevcache[filtername] = func(repo.unfiltered())
244 return repo.filteredrevcache[filtername]
248 return repo.filteredrevcache[filtername]
245
249
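# Editorial sketch (hypothetical extension code, not part of this module):
# registering an extra filter goes through the two tables mentioned above.
# The filter name 'nosecret' and the function computenosecret are invented
# for illustration, and mapping it to 'served' in branchmap.subsettable is
# an assumption about a reasonable smaller subset to seed branch caches from.
#
#     from mercurial import branchmap, phases, repoview
#
#     def computenosecret(repo):
#         getphase = repo._phasecache.phase
#         return frozenset(r for r in xrange(len(repo.changelog))
#                          if getphase(repo, r) >= phases.secret)
#
#     repoview.filtertable['nosecret'] = computenosecret
#     branchmap.subsettable['nosecret'] = 'served'
#     repo.filtered('nosecret')  # resolved through filterrevs() above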
246 class repoview(object):
250 class repoview(object):
247 """Provide a read/write view of a repo through a filtered changelog
251 """Provide a read/write view of a repo through a filtered changelog
248
252
249 This object is used to access a filtered version of a repository without
253 This object is used to access a filtered version of a repository without
250 altering the original repository object itself. We can not alter the
254 altering the original repository object itself. We can not alter the
251 original object for two main reasons:
255 original object for two main reasons:
252 - It prevents the use of a repo with multiple filters at the same time. In
256 - It prevents the use of a repo with multiple filters at the same time. In
253 particular when multiple threads are involved.
257 particular when multiple threads are involved.
254 - It makes the scope of the filtering harder to control.
258 - It makes the scope of the filtering harder to control.
255
259
256 This object behaves very much like the original repository. All attribute
260 This object behaves very much like the original repository. All attribute
257 operations are done on the original repository:
261 operations are done on the original repository:
258 - An access to `repoview.someattr` actually returns `repo.someattr`,
262 - An access to `repoview.someattr` actually returns `repo.someattr`,
259 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
263 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
260 - A deletion of `repoview.someattr` actually drops `someattr`
264 - A deletion of `repoview.someattr` actually drops `someattr`
261 from `repo.__dict__`.
265 from `repo.__dict__`.
262
266
263 The only exception is the `changelog` property. It is overridden to return
267 The only exception is the `changelog` property. It is overridden to return
264 a (surface) copy of `repo.changelog` with some revisions filtered. The
268 a (surface) copy of `repo.changelog` with some revisions filtered. The
265 `filtername` attribute of the view controls the revisions that need to be
269 `filtername` attribute of the view controls the revisions that need to be
266 filtered. (the fact the changelog is copied is an implementation detail).
270 filtered. (the fact the changelog is copied is an implementation detail).
267
271
268 Unlike attributes, this object intercepts all method calls. This means that
272 Unlike attributes, this object intercepts all method calls. This means that
269 all methods are run on the `repoview` object with the filtered `changelog`
273 all methods are run on the `repoview` object with the filtered `changelog`
270 property. For this purpose the simple `repoview` class must be mixed with
274 property. For this purpose the simple `repoview` class must be mixed with
271 the actual class of the repository. This ensures that the resulting
275 the actual class of the repository. This ensures that the resulting
272 `repoview` object has the very same methods as the repo object. This
276 `repoview` object has the very same methods as the repo object. This
273 leads to the property below.
277 leads to the property below.
274
278
275 repoview.method() --> repo.__class__.method(repoview)
279 repoview.method() --> repo.__class__.method(repoview)
276
280
277 The inheritance has to be done dynamically because `repo` can be any
281 The inheritance has to be done dynamically because `repo` can be any
278 subclass of `localrepo`, e.g. `bundlerepo` or `statichttprepo`.
282 subclass of `localrepo`, e.g. `bundlerepo` or `statichttprepo`.
279 """
283 """
280
284
281 def __init__(self, repo, filtername):
285 def __init__(self, repo, filtername):
282 object.__setattr__(self, '_unfilteredrepo', repo)
286 object.__setattr__(self, '_unfilteredrepo', repo)
283 object.__setattr__(self, 'filtername', filtername)
287 object.__setattr__(self, 'filtername', filtername)
284 object.__setattr__(self, '_clcachekey', None)
288 object.__setattr__(self, '_clcachekey', None)
285 object.__setattr__(self, '_clcache', None)
289 object.__setattr__(self, '_clcache', None)
286
290
287 # not a propertycache on purpose; we shall implement a proper cache later
291 # not a propertycache on purpose; we shall implement a proper cache later
288 @property
292 @property
289 def changelog(self):
293 def changelog(self):
290 """return a filtered version of the changeset
294 """return a filtered version of the changeset
291
295
292 this changelog must not be used for writing"""
296 this changelog must not be used for writing"""
293 # some cache may be implemented later
297 # some cache may be implemented later
294 unfi = self._unfilteredrepo
298 unfi = self._unfilteredrepo
295 unfichangelog = unfi.changelog
299 unfichangelog = unfi.changelog
296 revs = filterrevs(unfi, self.filtername)
300 revs = filterrevs(unfi, self.filtername)
297 cl = self._clcache
301 cl = self._clcache
298 newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs),
302 newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs),
299 unfichangelog._delayed)
303 unfichangelog._delayed)
300 if cl is not None:
304 if cl is not None:
301 # we need to check curkey too for some obscure reason.
305 # we need to check curkey too for some obscure reason.
302 # MQ tests show a corruption of the underlying repo (in _clcache)
306 # MQ tests show a corruption of the underlying repo (in _clcache)
303 # without change in the cachekey.
307 # without change in the cachekey.
304 oldfilter = cl.filteredrevs
308 oldfilter = cl.filteredrevs
305 try:
309 try:
306 cl.filteredrevs = () # disable filtering for tip
310 cl.filteredrevs = () # disable filtering for tip
307 curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed)
311 curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed)
308 finally:
312 finally:
309 cl.filteredrevs = oldfilter
313 cl.filteredrevs = oldfilter
310 if newkey != self._clcachekey or newkey != curkey:
314 if newkey != self._clcachekey or newkey != curkey:
311 cl = None
315 cl = None
312 # could have been made None by the previous if
316 # could have been made None by the previous if
313 if cl is None:
317 if cl is None:
314 cl = copy.copy(unfichangelog)
318 cl = copy.copy(unfichangelog)
315 cl.filteredrevs = revs
319 cl.filteredrevs = revs
316 object.__setattr__(self, '_clcache', cl)
320 object.__setattr__(self, '_clcache', cl)
317 object.__setattr__(self, '_clcachekey', newkey)
321 object.__setattr__(self, '_clcachekey', newkey)
318 return cl
322 return cl
319
323
320 def unfiltered(self):
324 def unfiltered(self):
321 """Return an unfiltered version of a repo"""
325 """Return an unfiltered version of a repo"""
322 return self._unfilteredrepo
326 return self._unfilteredrepo
323
327
324 def filtered(self, name):
328 def filtered(self, name):
325 """Return a filtered version of a repository"""
329 """Return a filtered version of a repository"""
326 if name == self.filtername:
330 if name == self.filtername:
327 return self
331 return self
328 return self.unfiltered().filtered(name)
332 return self.unfiltered().filtered(name)
329
333
330 # every attribute access is forwarded to the proxied repo
334 # every attribute access is forwarded to the proxied repo
331 def __getattr__(self, attr):
335 def __getattr__(self, attr):
332 return getattr(self._unfilteredrepo, attr)
336 return getattr(self._unfilteredrepo, attr)
333
337
334 def __setattr__(self, attr, value):
338 def __setattr__(self, attr, value):
335 return setattr(self._unfilteredrepo, attr, value)
339 return setattr(self._unfilteredrepo, attr, value)
336
340
337 def __delattr__(self, attr):
341 def __delattr__(self, attr):
338 return delattr(self._unfilteredrepo, attr)
342 return delattr(self._unfilteredrepo, attr)
339
343
340 # The `requirements` attribute is initialized during __init__. But
344 # The `requirements` attribute is initialized during __init__. But
341 # __getattr__ won't be called as it also exists on the class. We need
345 # __getattr__ won't be called as it also exists on the class. We need
342 # explicit forwarding to main repo here
346 # explicit forwarding to main repo here
343 @property
347 @property
344 def requirements(self):
348 def requirements(self):
345 return self._unfilteredrepo.requirements
349 return self._unfilteredrepo.requirements
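The repoview docstring above explains that a view class has to mix repoview with the concrete repository class so that repoview.method() resolves to repo.__class__.method(repoview). The factory that actually builds those classes is not part of this file; the sketch below is an assumption for illustration (the names _filteredrepotypes and makefilteredclass are invented) showing the dynamic-mixin idea with type():

_filteredrepotypes = {}  # cache: concrete repo class -> generated view class

def makefilteredclass(repocls):
    """Build (and cache) a class deriving from both repoview and repocls."""
    cls = _filteredrepotypes.get(repocls)
    if cls is None:
        cls = type('filtered%s' % repocls.__name__, (repoview, repocls), {})
        _filteredrepotypes[repocls] = cls
    return cls

# usage: makefilteredclass(repo.__class__)(repo, 'visible') yields an object
# with the repo's own methods but the filtered `changelog` property above.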