repoview: backout ced3ecfc2e57...
Pierre-Yves David
r23636:ab3b8d8f default
@@ -1,328 +1,323
# repoview.py - Filtered view of a localrepo object
#
# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

-import types
import copy
import error
import phases
import util
import obsolete
import struct
import tags as tagsmod

def hideablerevs(repo):
    """Revisions candidates to be hidden

    This is a standalone function to help extensions to wrap it."""
    return obsolete.getrevs(repo, 'obsolete')

def _getstaticblockers(repo):
    """Cacheable revisions blocking hidden changesets from being filtered.

    Additional non-cached hidden blockers are computed in _getdynamicblockers.
    This is a standalone function to help extensions to wrap it."""
    assert not repo.changelog.filteredrevs
    hideable = hideablerevs(repo)
    blockers = set()
    if hideable:
        # We use cl to avoid recursive lookup from repo[xxx]
        cl = repo.changelog
        firsthideable = min(hideable)
        revs = cl.revs(start=firsthideable)
        tofilter = repo.revs(
            '(%ld) and children(%ld)', list(revs), list(hideable))
        blockers.update([r for r in tofilter if r not in hideable])
    return blockers

def _getdynamicblockers(repo):
    """Non-cacheable revisions blocking hidden changesets from being filtered.

    Get revisions that will block hidden changesets and are likely to change,
    but unlikely to create hidden blockers. They won't be cached, so be careful
    with adding additional computation."""

    cl = repo.changelog
    blockers = set()
    blockers.update([par.rev() for par in repo[None].parents()])
    blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])

    tags = {}
    tagsmod.readlocaltags(repo.ui, repo, tags, {})
    if tags:
        rev, nodemap = cl.rev, cl.nodemap
        blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
    return blockers

cacheversion = 1
cachefile = 'cache/hidden'

def cachehash(repo, hideable):
    """return sha1 hash of repository data to identify a valid cache.

    We calculate a sha1 of repo heads and the content of the obsstore and write
    it to the cache. Upon reading we can easily validate by checking the hash
    against the stored one and discard the cache in case the hashes don't match.
    """
    h = util.sha1()
    h.update(''.join(repo.heads()))
    h.update(str(hash(frozenset(hideable))))
    return h.digest()

def _writehiddencache(cachefile, cachehash, hidden):
    """write hidden data to a cache file"""
    data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
    cachefile.write(struct.pack(">H", cacheversion))
    cachefile.write(cachehash)
    cachefile.write(data)

def trywritehiddencache(repo, hideable, hidden):
    """write cache of hidden changesets to disk

    Will not write the cache if a wlock cannot be obtained lazily.
    The cache consists of a head of 22byte:
        2 byte version number of the cache
        20 byte sha1 to validate the cache
        n*4 byte hidden revs
    """
    wlock = fh = None
    try:
        try:
            wlock = repo.wlock(wait=False)
            # write cache to file
            newhash = cachehash(repo, hideable)
            fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
            _writehiddencache(fh, newhash, hidden)
        except (IOError, OSError):
            repo.ui.debug('error writing hidden changesets cache')
        except error.LockHeld:
            repo.ui.debug('cannot obtain lock to write hidden changesets cache')
    finally:
        if fh:
            fh.close()
        if wlock:
            wlock.release()

def tryreadcache(repo, hideable):
    """read a cache if the cache exists and is valid, otherwise returns None."""
    hidden = fh = None
    try:
        if repo.vfs.exists(cachefile):
            fh = repo.vfs.open(cachefile, 'rb')
            version, = struct.unpack(">H", fh.read(2))
            oldhash = fh.read(20)
            newhash = cachehash(repo, hideable)
            if (cacheversion, oldhash) == (version, newhash):
                # cache is valid, so we can start reading the hidden revs
                data = fh.read()
                count = len(data) / 4
                hidden = frozenset(struct.unpack('>%ii' % count, data))
        return hidden
    finally:
        if fh:
            fh.close()

def computehidden(repo):
    """compute the set of hidden revision to filter

    During most operation hidden should be filtered."""
    assert not repo.changelog.filteredrevs

    hidden = frozenset()
    hideable = hideablerevs(repo)
    if hideable:
        cl = repo.changelog
        hidden = tryreadcache(repo, hideable)
        if hidden is None:
            blocked = cl.ancestors(_getstaticblockers(repo), inclusive=True)
            hidden = frozenset(r for r in hideable if r not in blocked)
            trywritehiddencache(repo, hideable, hidden)

        # check if we have wd parents, bookmarks or tags pointing to hidden
        # changesets and remove those.
        dynamic = hidden & _getdynamicblockers(repo)
        if dynamic:
            blocked = cl.ancestors(dynamic, inclusive=True)
            hidden = frozenset(r for r in hidden if r not in blocked)
    return hidden

def computeunserved(repo):
    """compute the set of revision that should be filtered when used a server

    Secret and hidden changeset should not pretend to be here."""
    assert not repo.changelog.filteredrevs
    # fast path in simple case to avoid impact of non optimised code
    hiddens = filterrevs(repo, 'visible')
    if phases.hassecret(repo):
        cl = repo.changelog
        secret = phases.secret
        getphase = repo._phasecache.phase
        first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
        revs = cl.revs(start=first)
        secrets = set(r for r in revs if getphase(repo, r) >= secret)
        return frozenset(hiddens | secrets)
    else:
        return hiddens

def computemutable(repo):
    """compute the set of revision that should be filtered when used a server

    Secret and hidden changeset should not pretend to be here."""
    assert not repo.changelog.filteredrevs
    # fast check to avoid revset call on huge repo
    if util.any(repo._phasecache.phaseroots[1:]):
        getphase = repo._phasecache.phase
        maymutable = filterrevs(repo, 'base')
        return frozenset(r for r in maymutable if getphase(repo, r))
    return frozenset()

def computeimpactable(repo):
    """Everything impactable by mutable revision

    The immutable filter still have some chance to get invalidated. This will
    happen when:

    - you garbage collect hidden changeset,
    - public phase is moved backward,
    - something is changed in the filtering (this could be fixed)

    This filter out any mutable changeset and any public changeset that may be
    impacted by something happening to a mutable revision.

    This is achieved by filtered everything with a revision number egal or
    higher than the first mutable changeset is filtered."""
    assert not repo.changelog.filteredrevs
    cl = repo.changelog
    firstmutable = len(cl)
    for roots in repo._phasecache.phaseroots[1:]:
        if roots:
            firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
    # protect from nullrev root
    firstmutable = max(0, firstmutable)
    return frozenset(xrange(firstmutable, len(cl)))

# function to compute filtered set
#
# When adding a new filter you MUST update the table at:
#     mercurial.branchmap.subsettable
# Otherwise your filter will have to recompute all its branches cache
# from scratch (very slow).
filtertable = {'visible': computehidden,
               'served': computeunserved,
               'immutable': computemutable,
               'base': computeimpactable}

def filterrevs(repo, filtername):
    """returns set of filtered revision for this filter name"""
    if filtername not in repo.filteredrevcache:
        func = filtertable[filtername]
        repo.filteredrevcache[filtername] = func(repo.unfiltered())
    return repo.filteredrevcache[filtername]

class repoview(object):
    """Provide a read/write view of a repo through a filtered changelog

    This object is used to access a filtered version of a repository without
    altering the original repository object itself. We can not alter the
    original object for two main reasons:
    - It prevents the use of a repo with multiple filters at the same time. In
      particular when multiple threads are involved.
    - It makes scope of the filtering harder to control.

    This object behaves very closely to the original repository. All attribute
    operations are done on the original repository:
    - An access to `repoview.someattr` actually returns `repo.someattr`,
    - A write to `repoview.someattr` actually sets value of `repo.someattr`,
    - A deletion of `repoview.someattr` actually drops `someattr`
      from `repo.__dict__`.

    The only exception is the `changelog` property. It is overridden to return
    a (surface) copy of `repo.changelog` with some revisions filtered. The
    `filtername` attribute of the view control the revisions that need to be
    filtered. (the fact the changelog is copied is an implementation detail).

    Unlike attributes, this object intercepts all method calls. This means that
    all methods are run on the `repoview` object with the filtered `changelog`
    property. For this purpose the simple `repoview` class must be mixed with
    the actual class of the repository. This ensures that the resulting
    `repoview` object have the very same methods than the repo object. This
    leads to the property below.

        repoview.method() --> repo.__class__.method(repoview)

    The inheritance has to be done dynamically because `repo` can be of any
    subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
    """

    def __init__(self, repo, filtername):
        object.__setattr__(self, '_unfilteredrepo', repo)
        object.__setattr__(self, 'filtername', filtername)
        object.__setattr__(self, '_clcachekey', None)
        object.__setattr__(self, '_clcache', None)

    # not a propertycache on purpose we shall implement a proper cache later
    @property
    def changelog(self):
        """return a filtered version of the changeset

        this changelog must not be used for writing"""
        # some cache may be implemented later
        unfi = self._unfilteredrepo
        unfichangelog = unfi.changelog
        revs = filterrevs(unfi, self.filtername)
        cl = self._clcache
        newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs))
        if cl is not None:
            # we need to check curkey too for some obscure reason.
            # MQ test show a corruption of the underlying repo (in _clcache)
            # without change in the cachekey.
            oldfilter = cl.filteredrevs
            try:
                cl.filteredrevs = () # disable filtering for tip
                curkey = (len(cl), cl.tip(), hash(oldfilter))
            finally:
                cl.filteredrevs = oldfilter
            if newkey != self._clcachekey or newkey != curkey:
                cl = None
        # could have been made None by the previous if
        if cl is None:
            cl = copy.copy(unfichangelog)
            cl.filteredrevs = revs
            object.__setattr__(self, '_clcache', cl)
            object.__setattr__(self, '_clcachekey', newkey)
        return cl

    def unfiltered(self):
        """Return an unfiltered version of a repo"""
        return self._unfilteredrepo

    def filtered(self, name):
        """Return a filtered version of a repository"""
        if name == self.filtername:
            return self
        return self.unfiltered().filtered(name)

    # everything access are forwarded to the proxied repo
    def __getattr__(self, attr):
        return getattr(self._unfilteredrepo, attr)

    def __setattr__(self, attr, value):
-        # Allow method replacement on filtered repos, like status() in
-        # largefiles' purge override
-        if type(value) == types.FunctionType:
-            object.__setattr__(self, attr, value)
        return setattr(self._unfilteredrepo, attr, value)

    def __delattr__(self, attr):
        return delattr(self._unfilteredrepo, attr)

    # The `requirements` attribute is initialized during __init__. But
    # __getattr__ won't be called as it also exists on the class. We need
    # explicit forwarding to main repo here
    @property
    def requirements(self):
        return self._unfilteredrepo.requirements
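
The change backed out here had made `__setattr__` keep function objects on the view itself (the `types.FunctionType` check), so that extensions such as largefiles could replace methods on a filtered repo. After the backout, every attribute write is once again forwarded to the unfiltered repository, and the `types` import is no longer needed. Below is a minimal standalone sketch of that delegation pattern as it behaves after the backout; the names `FilteredView` and `_backing` are made up for illustration and are not Mercurial APIs.

# Minimal sketch of the attribute-forwarding pattern repoview relies on.
# `FilteredView` and `_backing` are illustrative names, not Mercurial APIs.
class FilteredView(object):
    def __init__(self, backing, filtername):
        # bypass our own __setattr__ so these land on the view itself
        object.__setattr__(self, '_backing', backing)
        object.__setattr__(self, 'filtername', filtername)

    def __getattr__(self, attr):
        # only reached for attributes not found on the view itself
        return getattr(self._backing, attr)

    def __setattr__(self, attr, value):
        # after the backout: every write goes to the backing object,
        # with no special case for function objects
        return setattr(self._backing, attr, value)

    def __delattr__(self, attr):
        return delattr(self._backing, attr)

class Backing(object):
    pass

backing = Backing()
view = FilteredView(backing, 'visible')
view.someattr = 42              # stored on the backing object ...
assert backing.someattr == 42   # ... and readable through the view
assert view.someattr == 42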
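
For reference, `trywritehiddencache` and `tryreadcache` above describe the on-disk layout of the 'cache/hidden' file: a 2-byte big-endian version number, a 20-byte sha1 used for validation, then one 4-byte big-endian integer per hidden revision. The following is a small self-contained sketch of packing and unpacking that layout with the struct module; the helper names and the hash input are invented for illustration and do not mirror Mercurial's exact code.

# Standalone sketch of the 22-byte header + n*4-byte body layout described
# in trywritehiddencache. Helper names and hash input are illustrative only.
import hashlib
import struct

CACHEVERSION = 1

def pack_hidden(validationhash, hidden):
    # 2-byte version + 20-byte sha1 header, then n * 4-byte revisions
    assert len(validationhash) == 20
    header = struct.pack('>H', CACHEVERSION) + validationhash
    body = struct.pack('>%ii' % len(hidden), *sorted(hidden))
    return header + body

def unpack_hidden(data, expectedhash):
    # return the hidden revs, or None when version or hash do not match
    version, = struct.unpack('>H', data[:2])
    storedhash = data[2:22]
    if (version, storedhash) != (CACHEVERSION, expectedhash):
        return None
    count = (len(data) - 22) // 4
    return frozenset(struct.unpack('>%ii' % count, data[22:]))

validationhash = hashlib.sha1(b'heads and obsstore state').digest()
blob = pack_hidden(validationhash, {3, 5, 8})
assert unpack_hidden(blob, validationhash) == frozenset([3, 5, 8])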