@@ -77,8 +77,7 @@ def updatecache(repo):
                 revs.extend(r for r in extrarevs if r <= partial.tiprev)
     revs.extend(cl.revs(start=partial.tiprev + 1))
     if revs:
-        ctxgen = (repo[r] for r in revs)
-        partial.update(repo, ctxgen)
+        partial.update(repo, revs)
         partial.write(repo)
     assert partial.validfor(repo)
     repo._branchcaches[repo.filtername] = partial
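
This hunk moves the revision-to-changectx conversion out of updatecache(): the caller now hands branchcache.update() plain revision numbers and lets it build the contexts itself. A rough sketch of the resulting caller-side pattern, assuming the Python 2 era Mercurial API this series targets; the repository path is a placeholder, and "cache" is whatever branchmap() returns for that repo:

from mercurial import hg, ui

repo = hg.repository(ui.ui(), '/path/to/repo')   # placeholder path, assumes a repo exists there
cl = repo.changelog
cache = repo.branchmap()                         # the repo's branchcache
revs = list(cl.revs(start=cache.tiprev + 1))     # revision numbers only, no contexts
# New-style call: pass revisions and let update() build repo[r] contexts itself.
cache.update(repo, revs)
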
@@ -144,12 +143,13 @@ class branchcache(dict):
             # Abort may be raise by read only opener
             pass
 
-    def update(self, repo, ctxgen):
+    def update(self, repo, revgen):
         """Given a branchhead cache, self, that may have extra nodes or be
         missing heads, and a generator of nodes that are at least a superset of
         heads missing, this function updates self to be correct.
         """
         cl = repo.changelog
+        ctxgen = (repo[r] for r in revgen)
         # collect new branch entries
         newbranches = {}
         for c in ctxgen:
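
The conversion added here, ctxgen = (repo[r] for r in revgen), is a generator expression, so update() still creates changectx objects one at a time as the loop below consumes them rather than materializing them up front. A self-contained illustration of that laziness; LoggingRepo is a made-up stand-in, not a Mercurial class:

class LoggingRepo(object):
    """Stand-in for repo: records each rev -> 'context' lookup."""
    def __init__(self):
        self.lookups = []
    def __getitem__(self, rev):          # mirrors repo[r] returning a changectx
        self.lookups.append(rev)
        return 'ctx-%d' % rev

repo = LoggingRepo()
revgen = iter([10, 11, 12])
ctxgen = (repo[r] for r in revgen)       # same shape as the line added above
print(repo.lookups)                      # [] -- nothing has been looked up yet
for c in ctxgen:
    pass
print(repo.lookups)                      # [10, 11, 12] -- built only on demand
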
@@ -196,7 +196,7 @@ def _headssummary(repo, remote, outgoing
     newmap = branchmap.branchcache((branch, heads[1])
                                    for branch, heads in headssum.iteritems()
                                    if heads[0] is not None)
-    newmap.update(repo, missingctx)
+    newmap.update(repo, (ctx.rev() for ctx in missingctx))
     for branch, newheads in newmap.iteritems():
         headssum[branch][1][:] = newheads
     return headssum
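
_headssummary() already holds changectx objects (missingctx), so it adapts to the revision-based signature by mapping each one back to its number with ctx.rev(); update() then rebuilds repo[r] contexts from those numbers, a little repeated work in exchange for a single, revision-based entry point. A tiny stand-alone sketch of that adapter; FakeCtx is a made-up stand-in for changectx:

class FakeCtx(object):
    """Made-up stand-in for a changectx; only rev() matters here."""
    def __init__(self, rev):
        self._rev = rev
    def rev(self):
        return self._rev

missingctx = [FakeCtx(3), FakeCtx(5)]
revgen = (ctx.rev() for ctx in missingctx)   # the adapter used in the hunk above
print(list(revgen))                          # [3, 5]
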
@@ -1406,10 +1406,11 @@ class localrepository(object):
         # it, Otherwise, since nodes were destroyed, the cache is stale and this
         # will be caught the next time it is read.
         if newheadnodes:
-            ctxgen = (self[node] for node in newheadnodes
-                      if self.changelog.hasnode(node))
+            cl = self.changelog
+            revgen = (cl.rev(node) for node in newheadnodes
+                      if cl.hasnode(node))
             cache = self._branchcaches[None]
-            cache.update(self, ctxgen)
+            cache.update(self, revgen)
             cache.write(self)
 
         # Ensure the persistent tag cache is updated. Doing it now
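
destroyed() starts from node hashes rather than revisions, so the new code converts through the changelog with cl.rev(node), keeping the hasnode() guard because some of the nodes it was handed may no longer exist after the strip. A self-contained sketch of that filter-and-convert step; FakeChangelog is a made-up stand-in for the changelog's node-to-rev mapping:

class FakeChangelog(object):
    """Stand-in for the changelog's node <-> rev mapping."""
    def __init__(self, nodes):
        self._revbynode = dict((n, i) for i, n in enumerate(nodes))
    def hasnode(self, node):
        return node in self._revbynode
    def rev(self, node):
        return self._revbynode[node]

cl = FakeChangelog(['n0', 'n1', 'n2'])
newheadnodes = ['n2', 'gone']                 # 'gone' was stripped away
revgen = (cl.rev(node) for node in newheadnodes
          if cl.hasnode(node))                # same shape as the added lines
print(list(revgen))                           # [2]
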