repoview: update documentation of _getstatichidden...
Pierre-Yves David
r24615:9e558b78 default
@@ -1,339 +1,345 @@
 # repoview.py - Filtered view of a localrepo object
 #
 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
 #                Logilab SA        <contact@logilab.fr>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 import collections
 import copy
 import error
 import phases
 import util
 import obsolete
 import struct
 import tags as tagsmod
 from node import nullrev
 
 def hideablerevs(repo):
     """Revisions candidates to be hidden
 
     This is a standalone function to help extensions to wrap it."""
     return obsolete.getrevs(repo, 'obsolete')
 
 def _getstatichidden(repo):
26 """Cacheable revisions blocking hidden changesets from being filtered.
26 """Revision to be hidden (disregarding dynamic blocker)
27
27
28 Additional non-cached hidden blockers are computed in _getdynamicblockers.
28 To keep a consistent graph, we cannot hide any revisions with
29 This is a standalone function to help extensions to wrap it."""
29 non-hidden descendants. This function computes the set of
30 revisions that could be hidden while keeping the graph consistent.
31
32 A second pass will be done to apply "dynamic blocker" like bookmarks or
33 working directory parents.
34
35 """
30 assert not repo.changelog.filteredrevs
36 assert not repo.changelog.filteredrevs
     hideable = hideablerevs(repo)
     if hideable:
         actuallyhidden = {}
         getphase = repo._phasecache.phase
         getparentrevs = repo.changelog.parentrevs
         queue = collections.deque((r, False) for r in repo.changelog.headrevs())
         while queue:
             rev, blocked = queue.popleft()
             phase = getphase(repo, rev)
             # Skip nodes which are public (guaranteed to not be hidden) and
             # nodes which have already been processed and won't be blocked by
             # the previous node.
             if phase == 0 or (not blocked and rev in actuallyhidden):
                 continue
             if rev in hideable:
                 if blocked:
                     actuallyhidden[rev] = False
                 else:
                     actuallyhidden.setdefault(rev, True)
             else:
                 blocked = True
 
             for parent in (p for p in getparentrevs(rev) if p != nullrev):
                 queue.append((parent, blocked))
         return set(rev for rev, hidden in actuallyhidden.iteritems() if hidden)
 
 def _getdynamicblockers(repo):
     """Non-cacheable revisions blocking hidden changesets from being filtered.
 
     Get revisions that will block hidden changesets and are likely to change,
     but unlikely to create hidden blockers. They won't be cached, so be careful
     with adding additional computation."""
 
     cl = repo.changelog
     blockers = set()
     blockers.update([par.rev() for par in repo[None].parents()])
     blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
 
     tags = {}
     tagsmod.readlocaltags(repo.ui, repo, tags, {})
     if tags:
         rev, nodemap = cl.rev, cl.nodemap
         blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
     return blockers
 
 cacheversion = 1
 cachefile = 'cache/hidden'
 
 def cachehash(repo, hideable):
     """return sha1 hash of repository data to identify a valid cache.
 
     We calculate a sha1 of repo heads and the content of the obsstore and write
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
     h = util.sha1()
     h.update(''.join(repo.heads()))
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
 
 def _writehiddencache(cachefile, cachehash, hidden):
     """write hidden data to a cache file"""
     data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
     cachefile.write(struct.pack(">H", cacheversion))
     cachefile.write(cachehash)
     cachefile.write(data)
 
 def trywritehiddencache(repo, hideable, hidden):
     """write cache of hidden changesets to disk
 
     Will not write the cache if a wlock cannot be obtained lazily.
     The cache consists of a head of 22byte:
        2 byte    version number of the cache
       20 byte    sha1 to validate the cache
        n*4 byte  hidden revs
     """
     wlock = fh = None
     try:
         try:
             wlock = repo.wlock(wait=False)
             # write cache to file
             newhash = cachehash(repo, hideable)
             fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
             _writehiddencache(fh, newhash, hidden)
         except (IOError, OSError):
             repo.ui.debug('error writing hidden changesets cache')
         except error.LockHeld:
             repo.ui.debug('cannot obtain lock to write hidden changesets cache')
     finally:
         if fh:
             fh.close()
         if wlock:
             wlock.release()
 
 def tryreadcache(repo, hideable):
     """read a cache if the cache exists and is valid, otherwise returns None."""
     hidden = fh = None
     try:
         if repo.vfs.exists(cachefile):
             fh = repo.vfs.open(cachefile, 'rb')
             version, = struct.unpack(">H", fh.read(2))
             oldhash = fh.read(20)
             newhash = cachehash(repo, hideable)
             if (cacheversion, oldhash) == (version, newhash):
                 # cache is valid, so we can start reading the hidden revs
                 data = fh.read()
                 count = len(data) / 4
                 hidden = frozenset(struct.unpack('>%ii' % count, data))
         return hidden
     finally:
         if fh:
             fh.close()
 
 def computehidden(repo):
     """compute the set of hidden revision to filter
 
     During most operation hidden should be filtered."""
     assert not repo.changelog.filteredrevs
 
     hidden = frozenset()
     hideable = hideablerevs(repo)
     if hideable:
         cl = repo.changelog
         hidden = tryreadcache(repo, hideable)
         if hidden is None:
             hidden = frozenset(_getstatichidden(repo))
             trywritehiddencache(repo, hideable, hidden)
 
         # check if we have wd parents, bookmarks or tags pointing to hidden
         # changesets and remove those.
         dynamic = hidden & _getdynamicblockers(repo)
         if dynamic:
             blocked = cl.ancestors(dynamic, inclusive=True)
             hidden = frozenset(r for r in hidden if r not in blocked)
     return hidden
 
 def computeunserved(repo):
     """compute the set of revision that should be filtered when used a server
 
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast path in simple case to avoid impact of non optimised code
     hiddens = filterrevs(repo, 'visible')
     if phases.hassecret(repo):
         cl = repo.changelog
         secret = phases.secret
         getphase = repo._phasecache.phase
         first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
         revs = cl.revs(start=first)
         secrets = set(r for r in revs if getphase(repo, r) >= secret)
         return frozenset(hiddens | secrets)
     else:
         return hiddens
 
 def computemutable(repo):
     """compute the set of revision that should be filtered when used a server
 
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast check to avoid revset call on huge repo
     if util.any(repo._phasecache.phaseroots[1:]):
         getphase = repo._phasecache.phase
         maymutable = filterrevs(repo, 'base')
         return frozenset(r for r in maymutable if getphase(repo, r))
     return frozenset()
 
 def computeimpactable(repo):
     """Everything impactable by mutable revision
 
     The immutable filter still have some chance to get invalidated. This will
     happen when:
 
     - you garbage collect hidden changeset,
     - public phase is moved backward,
     - something is changed in the filtering (this could be fixed)
 
     This filter out any mutable changeset and any public changeset that may be
     impacted by something happening to a mutable revision.
 
     This is achieved by filtered everything with a revision number egal or
     higher than the first mutable changeset is filtered."""
     assert not repo.changelog.filteredrevs
     cl = repo.changelog
     firstmutable = len(cl)
     for roots in repo._phasecache.phaseroots[1:]:
         if roots:
             firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
     # protect from nullrev root
     firstmutable = max(0, firstmutable)
     return frozenset(xrange(firstmutable, len(cl)))
 
 # function to compute filtered set
 #
 # When adding a new filter you MUST update the table at:
 #     mercurial.branchmap.subsettable
 # Otherwise your filter will have to recompute all its branches cache
 # from scratch (very slow).
 filtertable = {'visible': computehidden,
                'served': computeunserved,
                'immutable': computemutable,
                'base': computeimpactable}
 
 def filterrevs(repo, filtername):
     """returns set of filtered revision for this filter name"""
     if filtername not in repo.filteredrevcache:
         func = filtertable[filtername]
         repo.filteredrevcache[filtername] = func(repo.unfiltered())
     return repo.filteredrevcache[filtername]
 
 class repoview(object):
     """Provide a read/write view of a repo through a filtered changelog
 
     This object is used to access a filtered version of a repository without
     altering the original repository object itself. We can not alter the
     original object for two main reasons:
     - It prevents the use of a repo with multiple filters at the same time. In
       particular when multiple threads are involved.
     - It makes scope of the filtering harder to control.
 
     This object behaves very closely to the original repository. All attribute
     operations are done on the original repository:
     - An access to `repoview.someattr` actually returns `repo.someattr`,
     - A write to `repoview.someattr` actually sets value of `repo.someattr`,
     - A deletion of `repoview.someattr` actually drops `someattr`
       from `repo.__dict__`.
 
     The only exception is the `changelog` property. It is overridden to return
     a (surface) copy of `repo.changelog` with some revisions filtered. The
     `filtername` attribute of the view control the revisions that need to be
     filtered. (the fact the changelog is copied is an implementation detail).
 
     Unlike attributes, this object intercepts all method calls. This means that
     all methods are run on the `repoview` object with the filtered `changelog`
     property. For this purpose the simple `repoview` class must be mixed with
     the actual class of the repository. This ensures that the resulting
     `repoview` object have the very same methods than the repo object. This
     leads to the property below.
 
         repoview.method() --> repo.__class__.method(repoview)
 
     The inheritance has to be done dynamically because `repo` can be of any
     subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
     """
 
     def __init__(self, repo, filtername):
         object.__setattr__(self, '_unfilteredrepo', repo)
         object.__setattr__(self, 'filtername', filtername)
         object.__setattr__(self, '_clcachekey', None)
         object.__setattr__(self, '_clcache', None)
 
     # not a propertycache on purpose we shall implement a proper cache later
     @property
     def changelog(self):
         """return a filtered version of the changeset
 
         this changelog must not be used for writing"""
         # some cache may be implemented later
         unfi = self._unfilteredrepo
         unfichangelog = unfi.changelog
         revs = filterrevs(unfi, self.filtername)
         cl = self._clcache
         newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs),
                   unfichangelog._delayed)
         if cl is not None:
             # we need to check curkey too for some obscure reason.
             # MQ test show a corruption of the underlying repo (in _clcache)
             # without change in the cachekey.
             oldfilter = cl.filteredrevs
             try:
                 cl.filteredrevs = ()  # disable filtering for tip
                 curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed)
             finally:
                 cl.filteredrevs = oldfilter
             if newkey != self._clcachekey or newkey != curkey:
                 cl = None
         # could have been made None by the previous if
         if cl is None:
             cl = copy.copy(unfichangelog)
             cl.filteredrevs = revs
         object.__setattr__(self, '_clcache', cl)
         object.__setattr__(self, '_clcachekey', newkey)
         return cl
 
     def unfiltered(self):
         """Return an unfiltered version of a repo"""
         return self._unfilteredrepo
 
     def filtered(self, name):
         """Return a filtered version of a repository"""
         if name == self.filtername:
             return self
         return self.unfiltered().filtered(name)
 
     # everything access are forwarded to the proxied repo
     def __getattr__(self, attr):
         return getattr(self._unfilteredrepo, attr)
 
     def __setattr__(self, attr, value):
         return setattr(self._unfilteredrepo, attr, value)
 
     def __delattr__(self, attr):
         return delattr(self._unfilteredrepo, attr)
 
     # The `requirements` attribute is initialized during __init__. But
     # __getattr__ won't be called as it also exists on the class. We need
     # explicit forwarding to main repo here
     @property
     def requirements(self):
         return self._unfilteredrepo.requirements
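
To make the behaviour described by the updated docstring concrete, here is a minimal standalone sketch of the same head-to-root walk on a toy graph. It is illustrative only: `parentmap`, `heads` and `hideable` are plain Python stand-ins for the changelog, `repo.changelog.headrevs()` and `hideablerevs(repo)`, and the public-phase fast path is omitted.

# Standalone sketch of the walk described above: a revision in `hideable`
# stays hidden only if no non-hideable descendant reaches it.  Toy dicts
# replace the real changelog, phase cache and obsstore.
import collections

def statichidden(parentmap, heads, hideable):
    """Return the subset of `hideable` that has no visible descendant."""
    actuallyhidden = {}
    queue = collections.deque((r, False) for r in heads)
    while queue:
        rev, blocked = queue.popleft()
        if not blocked and rev in actuallyhidden:
            continue                         # already settled, nothing new to add
        if rev in hideable:
            if blocked:
                actuallyhidden[rev] = False  # a visible descendant reached us
            else:
                actuallyhidden.setdefault(rev, True)
        else:
            blocked = True                   # visible rev blocks all its ancestors
        for parent in parentmap.get(rev, ()):
            queue.append((parent, blocked))
    return set(r for r, hidden in actuallyhidden.items() if hidden)

# Linear history 0-1-2-3 where 2 and 3 are obsolete: both can be hidden.
parentmap = {1: [0], 2: [1], 3: [2]}
print(statichidden(parentmap, heads=[3], hideable={2, 3}))     # -> {2, 3}

# Add a visible child 4 on top of 2: 2 now has a non-hidden descendant,
# so only 3 may be hidden if the graph is to stay consistent.
parentmap[4] = [2]
print(statichidden(parentmap, heads=[3, 4], hideable={2, 3}))  # -> {3}

The second call shows the constraint the updated docstring states: once revision 2 gains a visible descendant (4), it can no longer be hidden and only 3 survives. In the real code, `computehidden` then applies the dynamic blockers (working directory parents, bookmarks, local tags) as a second pass on top of this cached result.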
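
The on-disk layout documented in `trywritehiddencache` (a 2-byte version, a 20-byte sha1, then one 4-byte big-endian signed integer per hidden revision) can be exercised with `struct` alone. The sketch below is a round trip under stated assumptions: `io.BytesIO` and an all-zero digest stand in for `repo.vfs` and the real `cachehash`.

# Round-trip sketch of the 'cache/hidden' layout documented above:
#   2 bytes version ('>H'), 20 bytes sha1, then n * 4-byte big-endian revs.
# io.BytesIO and an all-zero digest stand in for repo.vfs and cachehash().
import io
import struct

cacheversion = 1
dummyhash = b'\x00' * 20                  # placeholder for the real sha1 digest
hidden = {5, 7, 12}

buf = io.BytesIO()
buf.write(struct.pack('>H', cacheversion))
buf.write(dummyhash)
buf.write(struct.pack('>%ii' % len(hidden), *sorted(hidden)))

buf.seek(0)
version, = struct.unpack('>H', buf.read(2))
storedhash = buf.read(20)
data = buf.read()
count = len(data) // 4                    # the original uses '/', Python 2 ints
revs = frozenset(struct.unpack('>%ii' % count, data))
assert (version, storedhash) == (cacheversion, dummyhash)
print(version, sorted(revs))              # 1 [5, 7, 12]

The 22-byte header matches the docstring above; `tryreadcache` additionally compares the stored digest against a freshly computed `cachehash(repo, hideable)` before trusting the revision list.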
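
The `repoview` class docstring describes a dynamic-mixin proxy: the view class is combined with the repository's concrete class so that methods run against the view while plain attribute access is forwarded. The generic sketch below illustrates that pattern only; `View`, `Base` and `makeview` are hypothetical names, not Mercurial's actual factory.

# Generic sketch of the dynamic-mixin pattern the docstring describes: a proxy
# class is combined with the *actual* class of the wrapped object via type(),
# so methods run against the proxy while other attribute access is forwarded.
class View(object):
    def __init__(self, wrapped, label):
        object.__setattr__(self, '_wrapped', wrapped)
        object.__setattr__(self, 'label', label)

    @property
    def data(self):                     # the single overridden property
        return [x for x in self._wrapped.data if x % 2 == 0]

    def __getattr__(self, attr):        # everything else hits the original
        return getattr(self._wrapped, attr)

class Base(object):                     # stands in for localrepo or a subclass
    def __init__(self):
        self.data = [1, 2, 3, 4]

    def describe(self):
        # runs with `self` being the view, so it sees the filtered `data`
        return '%s: %r' % (self.label, self.data)

def makeview(obj, label):
    cls = type('filtered' + obj.__class__.__name__, (View, obj.__class__), {})
    return cls(obj, label)

v = makeview(Base(), 'even-only')
print(v.describe())                     # even-only: [2, 4]

Because `describe` resolves `self.data` through the mixed-in class, it sees the filtered value, which is why `repoview` has to be mixed with the repository's real class (possibly `bundlerepo` or `statichttprepo`) rather than merely wrapping it.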