repoview: use absolute_import
Gregory Szorc
r25972:f2791911 default
@@ -1,347 +1,352 @@
1 1 # repoview.py - Filtered view of a localrepo object
2 2 #
3 3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 4 # Logilab SA <contact@logilab.fr>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 import heapq
9 from __future__ import absolute_import
10
10 11 import copy
11 import error
12 import phases
13 import util
14 import obsolete
12 import heapq
15 13 import struct
16 import tags as tagsmod
17 from node import nullrev
14
15 from .node import nullrev
16 from . import (
17 error,
18 obsolete,
19 phases,
20 tags as tagsmod,
21 util,
22 )
18 23
19 24 def hideablerevs(repo):
20 25 """Revision candidates to be hidden
21 26
22 27 This is a standalone function to help extensions wrap it."""
23 28 return obsolete.getrevs(repo, 'obsolete')
24 29
25 30 def _getstatichidden(repo):
26 31 """Revisions to be hidden (disregarding dynamic blockers)
27 32
28 33 To keep a consistent graph, we cannot hide any revisions with
29 34 non-hidden descendants. This function computes the set of
30 35 revisions that could be hidden while keeping the graph consistent.
31 36
32 37 A second pass will be done to apply "dynamic blockers" like bookmarks or
33 38 working directory parents.
34 39
35 40 """
36 41 assert not repo.changelog.filteredrevs
37 42 hidden = set(hideablerevs(repo))
38 43 if hidden:
39 44 getphase = repo._phasecache.phase
40 45 getparentrevs = repo.changelog.parentrevs
41 46 # Skip heads which are public (guaranteed to not be hidden)
42 47 heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
43 48 heapq.heapify(heap)
44 49 heappop = heapq.heappop
45 50 heappush = heapq.heappush
46 51 seen = set() # no need to init it with heads, they have no children
47 52 while heap:
48 53 rev = -heappop(heap)
49 54 # All children have been processed so at that point, if no children
50 55 # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
51 56 blocker = rev not in hidden
52 57 for parent in getparentrevs(rev):
53 58 if parent == nullrev:
54 59 continue
55 60 if blocker:
56 61 # If visible, ensure parent will be visible too
57 62 hidden.discard(parent)
58 63 # - Avoid adding the same revision twice
59 64 # - Skip nodes which are public (guaranteed to not be hidden)
60 65 pre = len(seen)
61 66 seen.add(parent)
62 67 if pre < len(seen) and getphase(repo, rev):
63 68 heappush(heap, -parent)
64 69 return hidden
65 70
66 71 def _getdynamicblockers(repo):
67 72 """Non-cacheable revisions blocking hidden changesets from being filtered.
68 73
69 74 Get revisions that will block hidden changesets and are likely to change,
70 75 but unlikely to create hidden blockers. They won't be cached, so be careful
71 76 with adding additional computation."""
72 77
73 78 cl = repo.changelog
74 79 blockers = set()
75 80 blockers.update([par.rev() for par in repo[None].parents()])
76 81 blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
77 82
78 83 tags = {}
79 84 tagsmod.readlocaltags(repo.ui, repo, tags, {})
80 85 if tags:
81 86 rev, nodemap = cl.rev, cl.nodemap
82 87 blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
83 88 return blockers
84 89
85 90 cacheversion = 1
86 91 cachefile = 'cache/hidden'
87 92
88 93 def cachehash(repo, hideable):
89 94 """return sha1 hash of repository data to identify a valid cache.
90 95
91 96 We calculate a sha1 of repo heads and the content of the obsstore and write
92 97 it to the cache. Upon reading we can easily validate by checking the hash
93 98 against the stored one and discard the cache in case the hashes don't match.
94 99 """
95 100 h = util.sha1()
96 101 h.update(''.join(repo.heads()))
97 102 h.update(str(hash(frozenset(hideable))))
98 103 return h.digest()
99 104
100 105 def _writehiddencache(cachefile, cachehash, hidden):
101 106 """write hidden data to a cache file"""
102 107 data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
103 108 cachefile.write(struct.pack(">H", cacheversion))
104 109 cachefile.write(cachehash)
105 110 cachefile.write(data)
106 111
107 112 def trywritehiddencache(repo, hideable, hidden):
108 113 """write cache of hidden changesets to disk
109 114
110 115 Will not write the cache if a wlock cannot be obtained lazily.
111 116 The cache file consists of a 22-byte header:
112 117 2 byte version number of the cache
113 118 20 byte sha1 to validate the cache
114 119 followed by n*4 bytes of hidden revs
115 120 """
116 121 wlock = fh = None
117 122 try:
118 123 wlock = repo.wlock(wait=False)
119 124 # write cache to file
120 125 newhash = cachehash(repo, hideable)
121 126 fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
122 127 _writehiddencache(fh, newhash, hidden)
123 128 except (IOError, OSError):
124 129 repo.ui.debug('error writing hidden changesets cache')
125 130 except error.LockHeld:
126 131 repo.ui.debug('cannot obtain lock to write hidden changesets cache')
127 132 finally:
128 133 if fh:
129 134 fh.close()
130 135 if wlock:
131 136 wlock.release()
132 137
133 138 def tryreadcache(repo, hideable):
134 139 """read the cache if it exists and is valid, otherwise return None."""
135 140 hidden = fh = None
136 141 try:
137 142 if repo.vfs.exists(cachefile):
138 143 fh = repo.vfs.open(cachefile, 'rb')
139 144 version, = struct.unpack(">H", fh.read(2))
140 145 oldhash = fh.read(20)
141 146 newhash = cachehash(repo, hideable)
142 147 if (cacheversion, oldhash) == (version, newhash):
143 148 # cache is valid, so we can start reading the hidden revs
144 149 data = fh.read()
145 150 count = len(data) / 4
146 151 hidden = frozenset(struct.unpack('>%ii' % count, data))
147 152 return hidden
148 153 finally:
149 154 if fh:
150 155 fh.close()
151 156
152 157 def computehidden(repo):
153 158 """compute the set of hidden revisions to filter
154 159
155 160 During most operations hidden revisions should be filtered."""
156 161 assert not repo.changelog.filteredrevs
157 162
158 163 hidden = frozenset()
159 164 hideable = hideablerevs(repo)
160 165 if hideable:
161 166 cl = repo.changelog
162 167 hidden = tryreadcache(repo, hideable)
163 168 if hidden is None:
164 169 hidden = frozenset(_getstatichidden(repo))
165 170 trywritehiddencache(repo, hideable, hidden)
166 171
167 172 # check if we have wd parents, bookmarks or tags pointing to hidden
168 173 # changesets and remove those.
169 174 dynamic = hidden & _getdynamicblockers(repo)
170 175 if dynamic:
171 176 blocked = cl.ancestors(dynamic, inclusive=True)
172 177 hidden = frozenset(r for r in hidden if r not in blocked)
173 178 return hidden
174 179
175 180 def computeunserved(repo):
176 181 """compute the set of revisions that should be filtered when used as a server
177 182
178 183 Secret and hidden changesets should not pretend to be here."""
179 184 assert not repo.changelog.filteredrevs
180 185 # fast path in simple case to avoid impact of non-optimised code
181 186 hiddens = filterrevs(repo, 'visible')
182 187 if phases.hassecret(repo):
183 188 cl = repo.changelog
184 189 secret = phases.secret
185 190 getphase = repo._phasecache.phase
186 191 first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
187 192 revs = cl.revs(start=first)
188 193 secrets = set(r for r in revs if getphase(repo, r) >= secret)
189 194 return frozenset(hiddens | secrets)
190 195 else:
191 196 return hiddens
192 197
193 198 def computemutable(repo):
194 199 """compute the set of revisions that should be filtered for the immutable view
195 200
196 201 Mutable (non-public) changesets should not pretend to be here."""
197 202 assert not repo.changelog.filteredrevs
198 203 # fast check to avoid revset call on huge repo
199 204 if any(repo._phasecache.phaseroots[1:]):
200 205 getphase = repo._phasecache.phase
201 206 maymutable = filterrevs(repo, 'base')
202 207 return frozenset(r for r in maymutable if getphase(repo, r))
203 208 return frozenset()
204 209
205 210 def computeimpactable(repo):
206 211 """Everything impactable by mutable revision
207 212
208 213 The immutable filter still has some chance of being invalidated. This will
209 214 happen when:
210 215
211 216 - you garbage collect hidden changesets,
212 217 - public phase is moved backward,
213 218 - something is changed in the filtering (this could be fixed)
214 219
215 220 This filters out any mutable changeset and any public changeset that may be
216 221 impacted by something happening to a mutable revision.
217 222
218 223 This is achieved by filtering out everything with a revision number equal
219 224 to or higher than that of the first mutable changeset.
220 225 assert not repo.changelog.filteredrevs
221 226 cl = repo.changelog
222 227 firstmutable = len(cl)
223 228 for roots in repo._phasecache.phaseroots[1:]:
224 229 if roots:
225 230 firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
226 231 # protect from nullrev root
227 232 firstmutable = max(0, firstmutable)
228 233 return frozenset(xrange(firstmutable, len(cl)))
229 234
230 235 # function to compute filtered set
231 236 #
232 237 # When adding a new filter you MUST update the table at:
233 238 # mercurial.branchmap.subsettable
234 239 # Otherwise your filter will have to recompute all its branches cache
235 240 # from scratch (very slow).
236 241 filtertable = {'visible': computehidden,
237 242 'served': computeunserved,
238 243 'immutable': computemutable,
239 244 'base': computeimpactable}
240 245
241 246 def filterrevs(repo, filtername):
242 247 """returns the set of filtered revisions for this filter name"""
243 248 if filtername not in repo.filteredrevcache:
244 249 func = filtertable[filtername]
245 250 repo.filteredrevcache[filtername] = func(repo.unfiltered())
246 251 return repo.filteredrevcache[filtername]
247 252
248 253 class repoview(object):
249 254 """Provide a read/write view of a repo through a filtered changelog
250 255
251 256 This object is used to access a filtered version of a repository without
252 257 altering the original repository object itself. We can not alter the
253 258 original object for two main reasons:
254 259 - It prevents the use of a repo with multiple filters at the same time. In
255 260 particular when multiple threads are involved.
256 261 - It makes the scope of the filtering harder to control.
257 262
258 263 This object behaves very much like the original repository. All attribute
259 264 operations are done on the original repository:
260 265 - An access to `repoview.someattr` actually returns `repo.someattr`,
261 266 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
262 267 - A deletion of `repoview.someattr` actually drops `someattr`
263 268 from `repo.__dict__`.
264 269
265 270 The only exception is the `changelog` property. It is overridden to return
266 271 a (surface) copy of `repo.changelog` with some revisions filtered. The
267 272 `filtername` attribute of the view controls the revisions that need to be
268 273 filtered. (The fact that the changelog is copied is an implementation detail.)
269 274
270 275 Unlike attributes, this object intercepts all method calls. This means that
271 276 all methods are run on the `repoview` object with the filtered `changelog`
272 277 property. For this purpose the simple `repoview` class must be mixed with
273 278 the actual class of the repository. This ensures that the resulting
274 279 `repoview` object has the very same methods as the repo object. This
275 280 leads to the property below.
276 281
277 282 repoview.method() --> repo.__class__.method(repoview)
278 283
279 284 The inheritance has to be done dynamically because `repo` can be any
280 285 subclass of `localrepo`, e.g. `bundlerepo` or `statichttprepo`.
281 286 """
282 287
283 288 def __init__(self, repo, filtername):
284 289 object.__setattr__(self, '_unfilteredrepo', repo)
285 290 object.__setattr__(self, 'filtername', filtername)
286 291 object.__setattr__(self, '_clcachekey', None)
287 292 object.__setattr__(self, '_clcache', None)
288 293
289 294 # not a propertycache on purpose; we shall implement a proper cache later
290 295 @property
291 296 def changelog(self):
292 297 """return a filtered version of the changelog
293 298
294 299 this changelog must not be used for writing"""
295 300 # some cache may be implemented later
296 301 unfi = self._unfilteredrepo
297 302 unfichangelog = unfi.changelog
298 303 revs = filterrevs(unfi, self.filtername)
299 304 cl = self._clcache
300 305 newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs),
301 306 unfichangelog._delayed)
302 307 if cl is not None:
303 308 # we need to check curkey too for some obscure reason.
304 309 # MQ tests show a corruption of the underlying repo (in _clcache)
305 310 # without a change in the cachekey.
306 311 oldfilter = cl.filteredrevs
307 312 try:
308 313 cl.filteredrevs = () # disable filtering for tip
309 314 curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed)
310 315 finally:
311 316 cl.filteredrevs = oldfilter
312 317 if newkey != self._clcachekey or newkey != curkey:
313 318 cl = None
314 319 # could have been made None by the previous if
315 320 if cl is None:
316 321 cl = copy.copy(unfichangelog)
317 322 cl.filteredrevs = revs
318 323 object.__setattr__(self, '_clcache', cl)
319 324 object.__setattr__(self, '_clcachekey', newkey)
320 325 return cl
321 326
322 327 def unfiltered(self):
323 328 """Return an unfiltered version of a repo"""
324 329 return self._unfilteredrepo
325 330
326 331 def filtered(self, name):
327 332 """Return a filtered version of a repository"""
328 333 if name == self.filtername:
329 334 return self
330 335 return self.unfiltered().filtered(name)
331 336
332 337 # all attribute accesses are forwarded to the proxied repo
333 338 def __getattr__(self, attr):
334 339 return getattr(self._unfilteredrepo, attr)
335 340
336 341 def __setattr__(self, attr, value):
337 342 return setattr(self._unfilteredrepo, attr, value)
338 343
339 344 def __delattr__(self, attr):
340 345 return delattr(self._unfilteredrepo, attr)
341 346
342 347 # The `requirements` attribute is initialized during __init__. But
343 348 # __getattr__ won't be called as it also exists on the class. We need
344 349 # explicit forwarding to main repo here
345 350 @property
346 351 def requirements(self):
347 352 return self._unfilteredrepo.requirements
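
The cache layout described in the `trywritehiddencache` docstring (a 2-byte big-endian version, a 20-byte sha1, then one signed 32-bit big-endian integer per hidden revision) can be exercised outside Mercurial with nothing but the stdlib `struct` module. The sketch below is illustrative only: `pack_hidden_cache`, `unpack_hidden_cache` and `CACHE_VERSION` are made-up names, not part of Mercurial's API, which builds and reads the blob through `cachehash`, `_writehiddencache` and `tryreadcache` as shown above.

import hashlib
import struct

CACHE_VERSION = 1  # mirrors the module-level 'cacheversion' above

def pack_hidden_cache(validation_hash, hidden_revs):
    """Serialize hidden revisions as a 22-byte header plus 4 bytes per rev."""
    assert len(validation_hash) == 20  # sha1 digest length
    header = struct.pack('>H', CACHE_VERSION) + validation_hash
    payload = struct.pack('>%ii' % len(hidden_revs), *sorted(hidden_revs))
    return header + payload

def unpack_hidden_cache(data, expected_hash):
    """Return the hidden revs, or None when version or hash do not match."""
    version, = struct.unpack('>H', data[:2])
    if (version, data[2:22]) != (CACHE_VERSION, expected_hash):
        return None
    payload = data[22:]
    count = len(payload) // 4
    return frozenset(struct.unpack('>%ii' % count, payload))

# round trip with a dummy validation hash
digest = hashlib.sha1(b'heads + obsstore state').digest()
blob = pack_hidden_cache(digest, {5, 7, 9})
assert unpack_hidden_cache(blob, digest) == frozenset({5, 7, 9})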
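
The `repoview` docstring states that `repoview.method() --> repo.__class__.method(repoview)` thanks to a class built dynamically from the proxy and the concrete repository class. The toy sketch below illustrates that mix-in idea with invented classes (`baserepo`, `proxyview`, `make_filtered`); it is not Mercurial's implementation, which constructs the combined class outside this file, but it shows why a method inherited from the real repo class ends up seeing the filtered `changelog`.

class baserepo(object):
    def __init__(self):
        self.changelog = ['r0', 'r1', 'r2']

    def tip(self):
        # runs against whatever 'self' is, including a proxy instance
        return self.changelog[-1]

class proxyview(object):
    def __init__(self, repo, hidden):
        object.__setattr__(self, '_repo', repo)
        object.__setattr__(self, '_hidden', hidden)

    @property
    def changelog(self):
        # filtered view of the proxied repo's changelog
        return [r for i, r in enumerate(self._repo.changelog)
                if i not in self._hidden]

    def __getattr__(self, name):
        return getattr(self._repo, name)

def make_filtered(repo, hidden):
    # combine the proxy and the repo's own class dynamically, so that
    # proxy.method() resolves to repo.__class__.method(proxy)
    cls = type('filtered' + repo.__class__.__name__,
               (proxyview, repo.__class__), {})
    return cls(repo, hidden)

repo = baserepo()
view = make_filtered(repo, hidden={2})
assert repo.tip() == 'r2'
assert view.tip() == 'r1'  # tip() ran on the proxy and saw the filtered log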