branchmap: add a copy method...
Pierre-Yves David
r18232:dd0b636b default
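This changeset adds a copy() method to the branchcache class. The new method returns a fresh branchcache built from the same branch-to-heads entries, carrying over the tipnode, tiprev and filteredhash cache keys so that the copy keeps everything validfor() and write() rely on. A short, illustrative usage sketch follows the diff below.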
@@ -1,212 +1,215 @@
1 1 # branchmap.py - logic to compute, maintain and store branchmap for local repo
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev
9 9 import encoding
10 10 import util
11 11
12 12 def _filename(repo):
13 13 """name of a branchcache file for a given repo or repoview"""
14 14 filename = "cache/branchheads"
15 15 if repo.filtername:
16 16 filename = '%s-%s' % (filename, repo.filtername)
17 17 return filename
18 18
19 19 def read(repo):
20 20 try:
21 21 f = repo.opener(_filename(repo))
22 22 lines = f.read().split('\n')
23 23 f.close()
24 24 except (IOError, OSError):
25 25 return None
26 26
27 27 try:
28 28 cachekey = lines.pop(0).split(" ", 2)
29 29 last, lrev = cachekey[:2]
30 30 last, lrev = bin(last), int(lrev)
31 31 filteredhash = None
32 32 if len(cachekey) > 2:
33 33 filteredhash = bin(cachekey[2])
34 34 partial = branchcache(tipnode=last, tiprev=lrev,
35 35 filteredhash=filteredhash)
36 36 if not partial.validfor(repo):
37 37 # invalidate the cache
38 38 raise ValueError('tip differs')
39 39 for l in lines:
40 40 if not l:
41 41 continue
42 42 node, label = l.split(" ", 1)
43 43 label = encoding.tolocal(label.strip())
44 44 if node not in repo:
45 45 raise ValueError('node %s does not exist' % node)
46 46 partial.setdefault(label, []).append(bin(node))
47 47 except KeyboardInterrupt:
48 48 raise
49 49 except Exception, inst:
50 50 if repo.ui.debugflag:
51 51 msg = 'invalid branchheads cache'
52 52 if repo.filtername is not None:
53 53 msg += ' (%s)' % repo.filtername
54 54 msg += ': %s\n'
55 55 repo.ui.warn(msg % inst)
56 56 partial = None
57 57 return partial
58 58
59 59
60 60
61 61 def updatecache(repo):
62 62 cl = repo.changelog
63 63 filtername = repo.filtername
64 64 partial = repo._branchcaches.get(filtername)
65 65
66 66 if partial is None or not partial.validfor(repo):
67 67 partial = read(repo)
68 68 if partial is None:
69 69 partial = branchcache()
70 70
71 71 revs = list(cl.revs(start=partial.tiprev + 1))
72 72 if revs:
73 73 ctxgen = (repo[r] for r in revs)
74 74 partial.update(repo, ctxgen)
75 75 partial.write(repo)
76 76 repo._branchcaches[repo.filtername] = partial
77 77
78 78 class branchcache(dict):
79 79 """A dict like object that hold branches heads cache"""
80 80
81 81 def __init__(self, entries=(), tipnode=nullid, tiprev=nullrev,
82 82 filteredhash=None):
83 83 super(branchcache, self).__init__(entries)
84 84 self.tipnode = tipnode
85 85 self.tiprev = tiprev
86 86 self.filteredhash = filteredhash
87 87
88 88 def _hashfiltered(self, repo):
89 89 """build hash of revision filtered in the current cache
90 90
91 91 Tracking tipnode and tiprev is not enough to ensure validity of the
92 92 cache, as they do not help to distinguish caches that ignored various
93 93 revisions below tiprev.
94 94
95 95 To detect such difference, we build a cache of all ignored revisions.
96 96 """
97 97 cl = repo.changelog
98 98 if not cl.filteredrevs:
99 99 return None
100 100 key = None
101 101 revs = sorted(r for r in cl.filteredrevs if r <= self.tiprev)
102 102 if revs:
103 103 s = util.sha1()
104 104 for rev in revs:
105 105 s.update('%s;' % rev)
106 106 key = s.digest()
107 107 return key
108 108
109 109 def validfor(self, repo):
110 110 """Is the cache content valide regarding a repo
111 111
112 112 - False when cached tipnode are unknown or if we detect a strip.
113 113 - True when cache is up to date or a subset of current repo."""
114 114 try:
115 115 return ((self.tipnode == repo.changelog.node(self.tiprev))
116 116 and (self.filteredhash == self._hashfiltered(repo)))
117 117 except IndexError:
118 118 return False
119 119
120 def copy(self):
121 """return an deep copy of the branchcache object"""
122 return branchcache(self, self.tipnode, self.tiprev, self.filteredhash)
120 123
121 124 def write(self, repo):
122 125 try:
123 126 f = repo.opener(_filename(repo), "w", atomictemp=True)
124 127 cachekey = [hex(self.tipnode), str(self.tiprev)]
125 128 if self.filteredhash is not None:
126 129 cachekey.append(hex(self.filteredhash))
127 130 f.write(" ".join(cachekey) + '\n')
128 131 for label, nodes in self.iteritems():
129 132 for node in nodes:
130 133 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
131 134 f.close()
132 135 except (IOError, OSError, util.Abort):
133 136 # Abort may be raised by a read-only opener
134 137 pass
135 138
136 139 def update(self, repo, ctxgen):
137 140 """Given a branchhead cache, self, that may have extra nodes or be
138 141 missing heads, and a generator of nodes that are at least a superset of
139 142 the missing heads, this function updates self to be correct.
140 143 """
141 144 cl = repo.changelog
142 145 # collect new branch entries
143 146 newbranches = {}
144 147 for c in ctxgen:
145 148 newbranches.setdefault(c.branch(), []).append(c.node())
146 149 # if older branchheads are reachable from new ones, they aren't
147 150 # really branchheads. Note checking parents is insufficient:
148 151 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
149 152 for branch, newnodes in newbranches.iteritems():
150 153 bheads = self.setdefault(branch, [])
151 154 # Remove candidate heads that no longer are in the repo (e.g., as
152 155 # the result of a strip that just happened). Avoid using 'node in
153 156 # self' here because that dives down into branchcache code somewhat
154 157 # recursively.
155 158 bheadrevs = [cl.rev(node) for node in bheads
156 159 if cl.hasnode(node)]
157 160 newheadrevs = [cl.rev(node) for node in newnodes
158 161 if cl.hasnode(node)]
159 162 ctxisnew = bheadrevs and min(newheadrevs) > max(bheadrevs)
160 163 # Remove duplicates - nodes that are in newheadrevs and are already
161 164 # in bheadrevs. This can happen if you strip a node whose parent
162 165 # was already a head (because they're on different branches).
163 166 bheadrevs = sorted(set(bheadrevs).union(newheadrevs))
164 167
165 168 # Starting from tip means fewer passes over reachable. If we know
166 169 # the new candidates are not ancestors of existing heads, we don't
167 170 # have to examine ancestors of existing heads
168 171 if ctxisnew:
169 172 iterrevs = sorted(newheadrevs)
170 173 else:
171 174 iterrevs = list(bheadrevs)
172 175
173 176 # This loop prunes out two kinds of heads - heads that are
174 177 # superseded by a head in newheadrevs, and newheadrevs that are not
175 178 # heads because an existing head is their descendant.
176 179 while iterrevs:
177 180 latest = iterrevs.pop()
178 181 if latest not in bheadrevs:
179 182 continue
180 183 ancestors = set(cl.ancestors([latest],
181 184 bheadrevs[0]))
182 185 if ancestors:
183 186 bheadrevs = [b for b in bheadrevs if b not in ancestors]
184 187 self[branch] = [cl.node(rev) for rev in bheadrevs]
185 188 tiprev = max(bheadrevs)
186 189 if tiprev > self.tiprev:
187 190 self.tipnode = cl.node(tiprev)
188 191 self.tiprev = tiprev
189 192
190 193 # There may be branches that cease to exist when the last commit in the
191 194 # branch was stripped. This code filters them out. Note that the
192 195 # branch that ceased to exist may not be in newbranches because
193 196 # newbranches is the set of candidate heads, which when you strip the
194 197 # last commit in a branch will be the parent branch.
195 198 droppednodes = []
196 199 for branch in self.keys():
197 200 nodes = [head for head in self[branch]
198 201 if cl.hasnode(head)]
199 202 if not nodes:
200 203 droppednodes.extend(nodes)
201 204 del self[branch]
202 205 if ((not self.validfor(repo)) or (self.tipnode in droppednodes)):
203 206
204 207 # cache keys are not valid anymore
205 208 self.tipnode = nullid
206 209 self.tiprev = nullrev
207 210 for heads in self.values():
208 211 tiprev = max(cl.rev(node) for node in heads)
209 212 if tiprev > self.tiprev:
210 213 self.tipnode = cl.node(tiprev)
211 214 self.tiprev = tiprev
212 215 self.filteredhash = self._hashfiltered(repo)
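The following is a minimal usage sketch of the new copy() method, not part of the changeset itself. It assumes the Python 2-era code above is importable as mercurial.branchmap, and the node id and revision values are made up purely for illustration.

from mercurial.branchmap import branchcache

# Build a small cache by hand (hypothetical values, for illustration only).
fakehead = '\x11' * 20                         # a 20-byte binary node id
cache = branchcache({'default': [fakehead]},
                    tipnode=fakehead, tiprev=5)

clone = cache.copy()

# The copy is a distinct mapping with the same branch -> heads entries ...
assert clone is not cache
assert dict(clone) == dict(cache)

# ... and, unlike a plain dict(cache), it carries over the cache key fields
# that validfor() and write() rely on.
assert (clone.tipnode, clone.tiprev, clone.filteredhash) == \
       (cache.tipnode, cache.tiprev, cache.filteredhash)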