repoview: use try/except/finally
Matt Mackall
r25086:140c2d1e default
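The change itself is small: the nested try/try in trywritehiddencache is collapsed into a single try/except/finally statement (a form Python has supported since 2.5), with the except clauses and the finally cleanup left as they were. A minimal, self-contained sketch of the pattern follows; write_cache, LockHeld and release are placeholder names for this illustration, not code from the patch.

    class LockHeld(Exception):
        """Stand-in for mercurial.error.LockHeld in this sketch."""

    def write_cache():
        raise IOError('simulated write failure')

    def release():
        print('cleanup ran')

    # Before: an inner try is needed just to carry the except clauses,
    # while the outer try holds the finally clause.
    try:
        try:
            write_cache()
        except (IOError, OSError):
            print('error writing hidden changesets cache')
        except LockHeld:
            print('cannot obtain lock to write hidden changesets cache')
    finally:
        release()

    # After: one try/except/finally statement does both jobs.
    try:
        write_cache()
    except (IOError, OSError):
        print('error writing hidden changesets cache')
    except LockHeld:
        print('cannot obtain lock to write hidden changesets cache')
    finally:
        release()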
@@ -1,348 +1,347 @@
1 1 # repoview.py - Filtered view of a localrepo object
2 2 #
3 3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 4 # Logilab SA <contact@logilab.fr>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import heapq
10 10 import copy
11 11 import error
12 12 import phases
13 13 import util
14 14 import obsolete
15 15 import struct
16 16 import tags as tagsmod
17 17 from node import nullrev
18 18
19 19 def hideablerevs(repo):
20 20 """Revisions candidates to be hidden
21 21
22 22 This is a standalone function to help extensions to wrap it."""
23 23 return obsolete.getrevs(repo, 'obsolete')
24 24
25 25 def _getstatichidden(repo):
26 26 """Revision to be hidden (disregarding dynamic blocker)
27 27
28 28 To keep a consistent graph, we cannot hide any revisions with
29 29 non-hidden descendants. This function computes the set of
30 30 revisions that could be hidden while keeping the graph consistent.
31 31
32 32 A second pass will be done to apply "dynamic blockers" like bookmarks or
33 33 working directory parents.
34 34
35 35 """
36 36 assert not repo.changelog.filteredrevs
37 37 hidden = set(hideablerevs(repo))
38 38 if hidden:
39 39 getphase = repo._phasecache.phase
40 40 getparentrevs = repo.changelog.parentrevs
41 41 # Skip heads which are public (guaranteed to not be hidden)
42 42 heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
43 43 heapq.heapify(heap)
44 44 heappop = heapq.heappop
45 45 heappush = heapq.heappush
46 46 seen = set() # no need to init it with heads, they have no children
47 47 while heap:
48 48 rev = -heappop(heap)
49 49 # All children have been processed so at that point, if no children
50 50 # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
51 51 blocker = rev not in hidden
52 52 for parent in getparentrevs(rev):
53 53 if parent == nullrev:
54 54 continue
55 55 if blocker:
56 56 # If visible, ensure parent will be visible too
57 57 hidden.discard(parent)
58 58 # - Avoid adding the same revision twice
59 59 # - Skip nodes which are public (guaranteed to not be hidden)
60 60 pre = len(seen)
61 61 seen.add(parent)
62 62 if pre < len(seen) and getphase(repo, rev):
63 63 heappush(heap, -parent)
64 64 return hidden
65 65
66 66 def _getdynamicblockers(repo):
67 67 """Non-cacheable revisions blocking hidden changesets from being filtered.
68 68
69 69 Get revisions that will block hidden changesets and are likely to change,
70 70 but unlikely to create hidden blockers. They won't be cached, so be careful
71 71 with adding additional computation."""
72 72
73 73 cl = repo.changelog
74 74 blockers = set()
75 75 blockers.update([par.rev() for par in repo[None].parents()])
76 76 blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
77 77
78 78 tags = {}
79 79 tagsmod.readlocaltags(repo.ui, repo, tags, {})
80 80 if tags:
81 81 rev, nodemap = cl.rev, cl.nodemap
82 82 blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
83 83 return blockers
84 84
85 85 cacheversion = 1
86 86 cachefile = 'cache/hidden'
87 87
88 88 def cachehash(repo, hideable):
89 89 """return sha1 hash of repository data to identify a valid cache.
90 90
91 91 We calculate a sha1 of repo heads and the content of the obsstore and write
92 92 it to the cache. Upon reading we can easily validate by checking the hash
93 93 against the stored one and discard the cache in case the hashes don't match.
94 94 """
95 95 h = util.sha1()
96 96 h.update(''.join(repo.heads()))
97 97 h.update(str(hash(frozenset(hideable))))
98 98 return h.digest()
99 99
100 100 def _writehiddencache(cachefile, cachehash, hidden):
101 101 """write hidden data to a cache file"""
102 102 data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
103 103 cachefile.write(struct.pack(">H", cacheversion))
104 104 cachefile.write(cachehash)
105 105 cachefile.write(data)
106 106
107 107 def trywritehiddencache(repo, hideable, hidden):
108 108 """write cache of hidden changesets to disk
109 109
110 110 Will not write the cache if a wlock cannot be obtained lazily.
111 111 The cache consists of a 22-byte header:
112 112 2 byte version number of the cache
113 113 20 byte sha1 to validate the cache
114 114 n*4 byte hidden revs
115 115 """
116 116 wlock = fh = None
117 117 try:
118 try:
119 wlock = repo.wlock(wait=False)
120 # write cache to file
121 newhash = cachehash(repo, hideable)
122 fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
123 _writehiddencache(fh, newhash, hidden)
124 except (IOError, OSError):
125 repo.ui.debug('error writing hidden changesets cache')
126 except error.LockHeld:
127 repo.ui.debug('cannot obtain lock to write hidden changesets cache')
118 wlock = repo.wlock(wait=False)
119 # write cache to file
120 newhash = cachehash(repo, hideable)
121 fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
122 _writehiddencache(fh, newhash, hidden)
123 except (IOError, OSError):
124 repo.ui.debug('error writing hidden changesets cache')
125 except error.LockHeld:
126 repo.ui.debug('cannot obtain lock to write hidden changesets cache')
128 127 finally:
129 128 if fh:
130 129 fh.close()
131 130 if wlock:
132 131 wlock.release()
133 132
134 133 def tryreadcache(repo, hideable):
135 134 """read a cache if the cache exists and is valid, otherwise returns None."""
136 135 hidden = fh = None
137 136 try:
138 137 if repo.vfs.exists(cachefile):
139 138 fh = repo.vfs.open(cachefile, 'rb')
140 139 version, = struct.unpack(">H", fh.read(2))
141 140 oldhash = fh.read(20)
142 141 newhash = cachehash(repo, hideable)
143 142 if (cacheversion, oldhash) == (version, newhash):
144 143 # cache is valid, so we can start reading the hidden revs
145 144 data = fh.read()
146 145 count = len(data) / 4
147 146 hidden = frozenset(struct.unpack('>%ii' % count, data))
148 147 return hidden
149 148 finally:
150 149 if fh:
151 150 fh.close()
152 151
153 152 def computehidden(repo):
154 153 """compute the set of hidden revision to filter
155 154
156 155 During most operation hidden should be filtered."""
157 156 assert not repo.changelog.filteredrevs
158 157
159 158 hidden = frozenset()
160 159 hideable = hideablerevs(repo)
161 160 if hideable:
162 161 cl = repo.changelog
163 162 hidden = tryreadcache(repo, hideable)
164 163 if hidden is None:
165 164 hidden = frozenset(_getstatichidden(repo))
166 165 trywritehiddencache(repo, hideable, hidden)
167 166
168 167 # check if we have wd parents, bookmarks or tags pointing to hidden
169 168 # changesets and remove those.
170 169 dynamic = hidden & _getdynamicblockers(repo)
171 170 if dynamic:
172 171 blocked = cl.ancestors(dynamic, inclusive=True)
173 172 hidden = frozenset(r for r in hidden if r not in blocked)
174 173 return hidden
175 174
176 175 def computeunserved(repo):
177 176 """compute the set of revision that should be filtered when used a server
178 177
179 178 Secret and hidden changeset should not pretend to be here."""
180 179 assert not repo.changelog.filteredrevs
181 180 # fast path in simple case to avoid impact of non-optimised code
182 181 hiddens = filterrevs(repo, 'visible')
183 182 if phases.hassecret(repo):
184 183 cl = repo.changelog
185 184 secret = phases.secret
186 185 getphase = repo._phasecache.phase
187 186 first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
188 187 revs = cl.revs(start=first)
189 188 secrets = set(r for r in revs if getphase(repo, r) >= secret)
190 189 return frozenset(hiddens | secrets)
191 190 else:
192 191 return hiddens
193 192
194 193 def computemutable(repo):
195 194 """compute the set of revision that should be filtered when used a server
196 195
197 196 Secret and hidden changeset should not pretend to be here."""
198 197 assert not repo.changelog.filteredrevs
199 198 # fast check to avoid revset call on huge repo
200 199 if util.any(repo._phasecache.phaseroots[1:]):
201 200 getphase = repo._phasecache.phase
202 201 maymutable = filterrevs(repo, 'base')
203 202 return frozenset(r for r in maymutable if getphase(repo, r))
204 203 return frozenset()
205 204
206 205 def computeimpactable(repo):
207 206 """Everything impactable by mutable revision
208 207
209 208 The immutable filter still have some chance to get invalidated. This will
210 209 happen when:
211 210
212 211 - you garbage collect hidden changeset,
213 212 - public phase is moved backward,
214 213 - something is changed in the filtering (this could be fixed)
215 214
216 215 This filter out any mutable changeset and any public changeset that may be
217 216 impacted by something happening to a mutable revision.
218 217
219 218 This is achieved by filtered everything with a revision number egal or
220 219 higher than the first mutable changeset is filtered."""
221 220 assert not repo.changelog.filteredrevs
222 221 cl = repo.changelog
223 222 firstmutable = len(cl)
224 223 for roots in repo._phasecache.phaseroots[1:]:
225 224 if roots:
226 225 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
227 226 # protect from nullrev root
228 227 firstmutable = max(0, firstmutable)
229 228 return frozenset(xrange(firstmutable, len(cl)))
230 229
231 230 # function to compute filtered set
232 231 #
233 232 # When adding a new filter you MUST update the table at:
234 233 # mercurial.branchmap.subsettable
235 234 # Otherwise your filter will have to recompute its entire branch cache
236 235 # from scratch (very slow).
237 236 filtertable = {'visible': computehidden,
238 237 'served': computeunserved,
239 238 'immutable': computemutable,
240 239 'base': computeimpactable}
241 240
242 241 def filterrevs(repo, filtername):
243 242 """returns set of filtered revision for this filter name"""
244 243 if filtername not in repo.filteredrevcache:
245 244 func = filtertable[filtername]
246 245 repo.filteredrevcache[filtername] = func(repo.unfiltered())
247 246 return repo.filteredrevcache[filtername]
248 247
249 248 class repoview(object):
250 249 """Provide a read/write view of a repo through a filtered changelog
251 250
252 251 This object is used to access a filtered version of a repository without
253 252 altering the original repository object itself. We cannot alter the
254 253 original object for two main reasons:
255 254 - It would prevent the use of a repo with multiple filters at the same time,
256 255 in particular when multiple threads are involved.
257 256 - It would make the scope of the filtering harder to control.
258 257
259 258 This object behaves very much like the original repository. All attribute
260 259 operations are done on the original repository:
261 260 - An access to `repoview.someattr` actually returns `repo.someattr`,
262 261 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
263 262 - A deletion of `repoview.someattr` actually drops `someattr`
264 263 from `repo.__dict__`.
265 264
266 265 The only exception is the `changelog` property. It is overridden to return
267 266 a (surface) copy of `repo.changelog` with some revisions filtered. The
268 267 `filtername` attribute of the view controls the revisions that need to be
269 268 filtered. (The fact that the changelog is copied is an implementation detail.)
270 269
271 270 Unlike attributes, this object intercepts all method calls. This means that
272 271 all methods are run on the `repoview` object with the filtered `changelog`
273 272 property. For this purpose the simple `repoview` class must be mixed with
274 273 the actual class of the repository. This ensures that the resulting
275 274 `repoview` object has the very same methods as the repo object. This
276 275 leads to the property below.
277 276
278 277 repoview.method() --> repo.__class__.method(repoview)
279 278
280 279 The inheritance has to be done dynamically because `repo` can be any
281 280 subclass of `localrepo`, e.g. `bundlerepo` or `statichttprepo`.
282 281 """
283 282
284 283 def __init__(self, repo, filtername):
285 284 object.__setattr__(self, '_unfilteredrepo', repo)
286 285 object.__setattr__(self, 'filtername', filtername)
287 286 object.__setattr__(self, '_clcachekey', None)
288 287 object.__setattr__(self, '_clcache', None)
289 288
290 289 # not a propertycache on purpose; we shall implement a proper cache later
291 290 @property
292 291 def changelog(self):
293 292 """return a filtered version of the changeset
294 293
295 294 this changelog must not be used for writing"""
296 295 # some cache may be implemented later
297 296 unfi = self._unfilteredrepo
298 297 unfichangelog = unfi.changelog
299 298 revs = filterrevs(unfi, self.filtername)
300 299 cl = self._clcache
301 300 newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs),
302 301 unfichangelog._delayed)
303 302 if cl is not None:
304 303 # we need to check curkey too for some obscure reason.
305 304 # MQ tests show a corruption of the underlying repo (in _clcache)
306 305 # without change in the cachekey.
307 306 oldfilter = cl.filteredrevs
308 307 try:
309 308 cl.filteredrevs = () # disable filtering for tip
310 309 curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed)
311 310 finally:
312 311 cl.filteredrevs = oldfilter
313 312 if newkey != self._clcachekey or newkey != curkey:
314 313 cl = None
315 314 # could have been made None by the previous if
316 315 if cl is None:
317 316 cl = copy.copy(unfichangelog)
318 317 cl.filteredrevs = revs
319 318 object.__setattr__(self, '_clcache', cl)
320 319 object.__setattr__(self, '_clcachekey', newkey)
321 320 return cl
322 321
323 322 def unfiltered(self):
324 323 """Return an unfiltered version of a repo"""
325 324 return self._unfilteredrepo
326 325
327 326 def filtered(self, name):
328 327 """Return a filtered version of a repository"""
329 328 if name == self.filtername:
330 329 return self
331 330 return self.unfiltered().filtered(name)
332 331
333 332 # all attribute accesses are forwarded to the proxied repo
334 333 def __getattr__(self, attr):
335 334 return getattr(self._unfilteredrepo, attr)
336 335
337 336 def __setattr__(self, attr, value):
338 337 return setattr(self._unfilteredrepo, attr, value)
339 338
340 339 def __delattr__(self, attr):
341 340 return delattr(self._unfilteredrepo, attr)
342 341
343 342 # The `requirements` attribute is initialized during __init__. But
344 343 # __getattr__ won't be called as it also exists on the class. We need
345 344 # explicit forwarding to the main repo here.
346 345 @property
347 346 def requirements(self):
348 347 return self._unfilteredrepo.requirements
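The docstrings above describe the hidden-cache file layout: a 2-byte version number, a 20-byte sha1, then one 4-byte big-endian integer per hidden revision (the 22-byte header mentioned in trywritehiddencache and read back by tryreadcache). A small round-trip sketch of that layout using plain struct and hashlib; pack_hidden_cache and unpack_hidden_cache are illustrative helpers, not functions from this module.

    import hashlib
    import struct

    CACHE_VERSION = 1

    def pack_hidden_cache(digest, hidden):
        # 2-byte big-endian version + 20-byte sha1 digest + n * 4-byte revisions
        head = struct.pack('>H', CACHE_VERSION) + digest
        return head + struct.pack('>%ii' % len(hidden), *sorted(hidden))

    def unpack_hidden_cache(data):
        version, = struct.unpack('>H', data[:2])
        digest = data[2:22]
        count = (len(data) - 22) // 4
        hidden = frozenset(struct.unpack('>%ii' % count, data[22:]))
        return version, digest, hidden

    digest = hashlib.sha1(b'repo heads + hideable set').digest()
    blob = pack_hidden_cache(digest, {3, 5, 8})
    assert unpack_hidden_cache(blob) == (CACHE_VERSION, digest, frozenset([3, 5, 8]))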
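For orientation, the names in filtertable are the same ones accepted by repoview.filtered() above (and by localrepo.filtered() on the underlying repo), while filterrevs() is the cached entry point that returns the revision numbers each view hides. A rough usage sketch, assuming the current working directory is a Mercurial repository and using the ui/hg constructors from the same era of the code base:

    from mercurial import hg, repoview
    from mercurial import ui as uimod

    # Assumes the current working directory is a Mercurial repository.
    repo = hg.repository(uimod.ui(), '.')

    hidden = repoview.filterrevs(repo, 'visible')    # revisions hidden from the default view
    unserved = repoview.filterrevs(repo, 'served')   # the same set plus secret changesets

    visible = repo.filtered('visible')               # a repoview with those revisions filtered out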