##// END OF EJS Templates
cleanup: remove useless re-raises of KeyboardInterrupt...
Yuya Nishihara -
r32182:b98ee1a8 default
parent child Browse files
Show More
@@ -1,521 +1,519 b''
# branchmap.py - logic to compute, maintain and store the branchmap for a local repo
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import struct
10 import struct
11
11
12 from .node import (
12 from .node import (
13 bin,
13 bin,
14 hex,
14 hex,
15 nullid,
15 nullid,
16 nullrev,
16 nullrev,
17 )
17 )
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 scmutil,
21 scmutil,
22 util,
22 util,
23 )
23 )
24
24
# Bind the struct helpers once at import time; they are used in the hot
# revision-branch-cache paths below.
calcsize = struct.calcsize
pack_into = struct.pack_into
unpack_from = struct.unpack_from
28
28
29 def _filename(repo):
29 def _filename(repo):
30 """name of a branchcache file for a given repo or repoview"""
30 """name of a branchcache file for a given repo or repoview"""
31 filename = "cache/branch2"
31 filename = "cache/branch2"
32 if repo.filtername:
32 if repo.filtername:
33 filename = '%s-%s' % (filename, repo.filtername)
33 filename = '%s-%s' % (filename, repo.filtername)
34 return filename
34 return filename
35
35
def read(repo):
    """Read the on-disk branch cache for *repo*.

    Returns a branchcache instance, or None when the cache file is
    missing, stale (tip mismatch) or corrupt in any way - any parse
    error simply invalidates the cache.
    """
    try:
        f = repo.vfs(_filename(repo))
        try:
            # close the handle even when read() raises; the previous code
            # leaked the file descriptor on a read error
            lines = f.read().split('\n')
        finally:
            f.close()
    except (IOError, OSError):
        return None

    try:
        cachekey = lines.pop(0).split(" ", 2)
        last, lrev = cachekey[:2]
        last, lrev = bin(last), int(lrev)
        filteredhash = None
        if len(cachekey) > 2:
            filteredhash = bin(cachekey[2])
        partial = branchcache(tipnode=last, tiprev=lrev,
                              filteredhash=filteredhash)
        if not partial.validfor(repo):
            # invalidate the cache
            raise ValueError('tip differs')
        cl = repo.changelog
        for l in lines:
            if not l:
                continue
            node, state, label = l.split(" ", 2)
            if state not in 'oc':
                raise ValueError('invalid branch state')
            label = encoding.tolocal(label.strip())
            node = bin(node)
            if not cl.hasnode(node):
                raise ValueError('node %s does not exist' % hex(node))
            partial.setdefault(label, []).append(node)
            if state == 'c':
                partial._closednodes.add(node)
    except Exception as inst:
        # the ValueErrors raised above, or any corruption while parsing,
        # discard the cache.  KeyboardInterrupt is not an Exception
        # subclass, so it propagates without an explicit re-raise.
        if repo.ui.debugflag:
            msg = 'invalid branchheads cache'
            if repo.filtername is not None:
                msg += ' (%s)' % repo.filtername
            msg += ': %s\n'
            repo.ui.debug(msg % inst)
        partial = None
    return partial
81
79
### Nearest subset relation
#
# The "nearest subset" of a filter X is the filter Y such that Y is
# included in X and X - Y is as small as possible.  This defines a
# (possibly partial) ordering used by the branchmap code: the cache for
# a filter level can be seeded from the cache of its nearest subset.
subsettable = {
    None: 'visible',
    'visible': 'served',
    'served': 'immutable',
    'immutable': 'base',
}
92
90
def updatecache(repo):
    """Bring repo's in-memory branch cache up to date and install it on
    ``repo._branchcaches``.

    A stale or missing cache is first re-read from disk; failing that it
    is seeded from the nearest subset view (per ``subsettable``) or
    started empty, then updated with any revisions it has not seen.
    """
    cl = repo.changelog
    filtername = repo.filtername
    bcache = repo._branchcaches.get(filtername)

    revs = []
    if bcache is None or not bcache.validfor(repo):
        bcache = read(repo)
        if bcache is None:
            subsetname = subsettable.get(filtername)
            if subsetname is None:
                # no smaller view to seed from - start from scratch
                bcache = branchcache()
            else:
                # seed from the nearest subset and replay the revisions
                # visible here but filtered there
                subset = repo.filtered(subsetname)
                bcache = subset.branchmap().copy()
                extrarevs = subset.changelog.filteredrevs - cl.filteredrevs
                revs.extend(r for r in extrarevs if r <= bcache.tiprev)
    revs.extend(cl.revs(start=bcache.tiprev + 1))
    if revs:
        bcache.update(repo, revs)
        bcache.write(repo)

    assert bcache.validfor(repo), filtername
    repo._branchcaches[repo.filtername] = bcache
117
115
def replacecache(repo, bm):
    """Replace the branchmap cache for a repo with a branch mapping.

    This is likely only called during clone with a branch map from a remote.
    """
    cl = repo.changelog
    allheads = []
    closed = []
    for heads in bm.itervalues():
        allheads.extend(heads)
        # record which of these heads close their branch
        for node in heads:
            branch, isclosed = cl.branchinfo(cl.rev(node))
            if isclosed:
                closed.append(node)

    if allheads:
        # the cache tip is the head with the highest revision number
        rtiprev = max(int(cl.rev(node)) for node in allheads)
        cache = branchcache(bm,
                            repo[rtiprev].node(),
                            rtiprev,
                            closednodes=closed)

        # Try to stick it as low as possible
        # filter above served are unlikely to be fetch from a clone
        for candidate in ('base', 'immutable', 'served'):
            rview = repo.filtered(candidate)
            if cache.validfor(rview):
                repo._branchcaches[candidate] = cache
                cache.write(rview)
                break
149
147
class branchcache(dict):
    """A dict like object that hold branches heads cache.

    This cache is used to avoid costly computations to determine all the
    branch heads of a repo.

    The cache is serialized on disk in the following format:

    <tip hex node> <tip rev number> [optional filtered repo hex hash]
    <branch head hex node> <open/closed state> <branch name>
    <branch head hex node> <open/closed state> <branch name>
    ...

    The first line is used to check if the cache is still valid. If the
    branch cache is for a filtered repo view, an optional third hash is
    included that hashes the hashes of all filtered revisions.

    The open/closed state is represented by a single letter 'o' or 'c'.
    This field can be used to avoid changelog reads when determining if a
    branch head closes a branch or not.
    """

    def __init__(self, entries=(), tipnode=nullid, tiprev=nullrev,
                 filteredhash=None, closednodes=None):
        super(branchcache, self).__init__(entries)
        self.tipnode = tipnode
        self.tiprev = tiprev
        self.filteredhash = filteredhash
        # _closednodes holds the nodes known to close their branch.  After
        # an update it may contain nodes that are no longer heads.
        self._closednodes = set() if closednodes is None else closednodes

    def validfor(self, repo):
        """Is the cache content valid regarding a repo

        - False when cached tipnode is unknown or if we detect a strip.
        - True when cache is up to date or a subset of current repo."""
        try:
            # short-circuit: only compute the filtered hash when the tip
            # node still matches
            return (self.tipnode == repo.changelog.node(self.tiprev)
                    and self.filteredhash ==
                        scmutil.filteredhash(repo, self.tiprev))
        except IndexError:
            # tiprev no longer exists (e.g. after a strip)
            return False

    def _branchtip(self, heads):
        '''Return tuple with last open head in heads and false,
        otherwise return last closed head and true.'''
        for h in reversed(heads):
            if h not in self._closednodes:
                return h, False
        return heads[-1], True

    def branchtip(self, branch):
        '''Return the tipmost open head on branch head, otherwise return the
        tipmost closed head on branch.
        Raise KeyError for unknown branch.'''
        return self._branchtip(self[branch])[0]

    def branchheads(self, branch, closed=False):
        """Return the heads of *branch*, omitting closed heads unless
        *closed* is true.  Raises KeyError for an unknown branch."""
        heads = self[branch]
        if closed:
            return heads
        return [h for h in heads if h not in self._closednodes]

    def iterbranches(self):
        """Yield (branchname, heads, tipnode, isclosed) for each branch."""
        for bn, heads in self.iteritems():
            yield (bn, heads) + self._branchtip(heads)

    def copy(self):
        """return a deep copy of the branchcache object"""
        return branchcache(self, self.tipnode, self.tiprev, self.filteredhash,
                           self._closednodes)

    def write(self, repo):
        """Serialize the cache to disk; failures are best-effort only."""
        try:
            f = repo.vfs(_filename(repo), "w", atomictemp=True)
            cachekey = [hex(self.tipnode), '%d' % self.tiprev]
            if self.filteredhash is not None:
                cachekey.append(hex(self.filteredhash))
            f.write(" ".join(cachekey) + '\n')
            nodecount = 0
            for label, nodes in sorted(self.iteritems()):
                for node in nodes:
                    nodecount += 1
                    state = 'c' if node in self._closednodes else 'o'
                    f.write("%s %s %s\n" % (hex(node), state,
                                            encoding.fromlocal(label)))
            f.close()
            repo.ui.log('branchcache',
                        'wrote %s branch cache with %d labels and %d nodes\n',
                        repo.filtername, len(self), nodecount)
        except (IOError, OSError, error.Abort) as inst:
            # Abort may be raised by a read-only opener; writing the branch
            # cache is never critical, so swallow and report in debug only
            repo.ui.debug("couldn't write branch cache: %s\n" % inst)

    def update(self, repo, revgen):
        """Given a branchhead cache, self, that may have extra nodes or be
        missing heads, and a generator of nodes that are strictly a superset of
        heads missing, this function updates self to be correct.
        """
        starttime = util.timer()
        cl = repo.changelog
        # collect new branch entries
        newbranches = {}
        getbranchinfo = repo.revbranchcache().branchinfo
        for r in revgen:
            branch, closesbranch = getbranchinfo(r)
            newbranches.setdefault(branch, []).append(r)
            if closesbranch:
                self._closednodes.add(cl.node(r))

        # fetch current topological heads to speed up filtering
        topoheads = set(cl.headrevs())

        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newheadrevs in newbranches.iteritems():
            bheads = self.setdefault(branch, [])
            bheadset = set(cl.rev(node) for node in bheads)

            # new head revisions are assumed disjoint from the cached ones
            newheadrevs.sort()
            bheadset.update(newheadrevs)

            # This prunes out two kinds of heads - heads that are superseded by
            # a head in newheadrevs, and newheadrevs that are not heads because
            # an existing head is their descendant.
            uncertain = bheadset - topoheads
            if uncertain:
                floorrev = min(uncertain)
                ancestors = set(cl.ancestors(newheadrevs, floorrev))
                bheadset -= ancestors
            bheadrevs = sorted(bheadset)
            self[branch] = [cl.node(rev) for rev in bheadrevs]
            tiprev = bheadrevs[-1]
            if tiprev > self.tiprev:
                self.tipnode = cl.node(tiprev)
                self.tiprev = tiprev

        if not self.validfor(repo):
            # the cache key is no longer valid: recompute the tip from
            # scratch over all cached heads
            self.tipnode = nullid
            self.tiprev = nullrev
            for heads in self.values():
                tiprev = max(cl.rev(node) for node in heads)
                if tiprev > self.tiprev:
                    self.tipnode = cl.node(tiprev)
                    self.tiprev = tiprev
        self.filteredhash = scmutil.filteredhash(repo, self.tiprev)

        duration = util.timer() - starttime
        repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n',
                    repo.filtername, duration)
318
316
319 # Revision branch info cache
317 # Revision branch info cache
320
318
321 _rbcversion = '-v1'
319 _rbcversion = '-v1'
322 _rbcnames = 'cache/rbc-names' + _rbcversion
320 _rbcnames = 'cache/rbc-names' + _rbcversion
323 _rbcrevs = 'cache/rbc-revs' + _rbcversion
321 _rbcrevs = 'cache/rbc-revs' + _rbcversion
324 # [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
322 # [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
325 _rbcrecfmt = '>4sI'
323 _rbcrecfmt = '>4sI'
326 _rbcrecsize = calcsize(_rbcrecfmt)
324 _rbcrecsize = calcsize(_rbcrecfmt)
327 _rbcnodelen = 4
325 _rbcnodelen = 4
328 _rbcbranchidxmask = 0x7fffffff
326 _rbcbranchidxmask = 0x7fffffff
329 _rbccloseflag = 0x80000000
327 _rbccloseflag = 0x80000000
330
328
class revbranchcache(object):
    """Persistent cache, mapping from revision number to branch name and close.
    This is a low level cache, independent of filtering.

    Branch names are stored in rbc-names in internal encoding separated by 0.
    rbc-names is append-only, and each branch name is only stored once and will
    thus have a unique index.

    The branch info for each revision is stored in rbc-revs as constant size
    records. The whole file is read into memory, but it is only 'parsed' on
    demand. The file is usually append-only but will be truncated if repo
    modification is detected.
    The record for each revision contains the first 4 bytes of the
    corresponding node hash, and the record is only used if it still matches.
    Even a completely trashed rbc-revs file will thus still give the right
    result while converging towards full recovery ... assuming no incorrectly
    matching node hashes.
    The record also contains 4 bytes where 31 bits contains the index of the
    branch and the last bit indicate that it is a branch close commit.
    The usage pattern for rbc-revs is thus somewhat similar to 00changelog.i
    and will grow with it but be 1/8th of its size.
    """

    def __init__(self, repo, readonly=True):
        assert repo.filtername is None
        self._repo = repo
        self._names = []  # branch names in local encoding with static index
        self._rbcrevs = bytearray()
        self._rbcsnameslen = 0  # length of names read at _rbcsnameslen
        try:
            bndata = repo.vfs.read(_rbcnames)
            self._rbcsnameslen = len(bndata)  # for verification before writing
            if bndata:
                self._names = [encoding.tolocal(bn)
                               for bn in bndata.split('\0')]
        except (IOError, OSError):
            if readonly:
                # don't try to use cache - fall back to the slow path
                self.branchinfo = self._branchinfo

        if self._names:
            try:
                data = repo.vfs.read(_rbcrevs)
                self._rbcrevs[:] = data
            except (IOError, OSError) as inst:
                # a missing/unreadable rbc-revs is recoverable: entries are
                # simply recomputed on demand
                repo.ui.debug("couldn't read revision branch cache: %s\n" %
                              inst)
        # remember number of good records on disk
        self._rbcrevslen = min(len(self._rbcrevs) // _rbcrecsize,
                               len(repo.changelog))
        if self._rbcrevslen == 0:
            self._names = []
        self._rbcnamescount = len(self._names)  # number of names read at
                                                # _rbcsnameslen
        self._namesreverse = dict((b, r) for r, b in enumerate(self._names))

    def _clear(self):
        """Reset the in-memory cache to a pristine, zero-filled state."""
        self._rbcsnameslen = 0
        del self._names[:]
        self._rbcnamescount = 0
        self._namesreverse.clear()
        self._rbcrevslen = len(self._repo.changelog)
        self._rbcrevs = bytearray(self._rbcrevslen * _rbcrecsize)

    def branchinfo(self, rev):
        """Return branch name and close flag for rev, using and updating
        persistent cache."""
        cl = self._repo.changelog
        rbcrevidx = rev * _rbcrecsize

        # avoid negative index, changelog.read(nullrev) is fast without cache
        if rev == nullrev:
            return cl.branchinfo(rev)

        # if requested rev isn't allocated, grow and cache the rev info
        if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
            return self._branchinfo(rev)

        # fast path: extract data from cache, use it if node is matching
        reponode = cl.node(rev)[:_rbcnodelen]
        cachenode, branchidx = unpack_from(
            _rbcrecfmt, util.buffer(self._rbcrevs), rbcrevidx)
        close = bool(branchidx & _rbccloseflag)
        if close:
            branchidx &= _rbcbranchidxmask
        if cachenode == '\0\0\0\0':
            # unfilled record - fall through to the slow path
            pass
        elif cachenode == reponode:
            try:
                return self._names[branchidx], close
            except IndexError:
                # recover from invalid reference to unknown branch
                self._repo.ui.debug("referenced branch names not found"
                    " - rebuilding revision branch cache from scratch\n")
                self._clear()
        else:
            # rev/node map has changed, invalidate the cache from here up
            self._repo.ui.debug("history modification detected - truncating "
                                "revision branch cache to revision %d\n" % rev)
            truncate = rbcrevidx + _rbcrecsize
            del self._rbcrevs[truncate:]
            self._rbcrevslen = min(self._rbcrevslen, truncate)

        # fall back to slow path and make sure it will be written to disk
        return self._branchinfo(rev)

    def _branchinfo(self, rev):
        """Retrieve branch info from changelog and update _rbcrevs"""
        cl = self._repo.changelog
        b, close = cl.branchinfo(rev)
        if b in self._namesreverse:
            branchidx = self._namesreverse[b]
        else:
            # first time we see this branch name: assign it the next index
            branchidx = len(self._names)
            self._names.append(b)
            self._namesreverse[b] = branchidx
        reponode = cl.node(rev)
        if close:
            branchidx |= _rbccloseflag
        self._setcachedata(rev, reponode, branchidx)
        return b, close

    def _setcachedata(self, rev, node, branchidx):
        """Writes the node's branch data to the in-memory cache data."""
        if rev == nullrev:
            return
        rbcrevidx = rev * _rbcrecsize
        if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
            # grow the buffer with zero (i.e. invalid) records up to the
            # current changelog length
            self._rbcrevs.extend('\0' *
                                 (len(self._repo.changelog) * _rbcrecsize -
                                  len(self._rbcrevs)))
        pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx)
        self._rbcrevslen = min(self._rbcrevslen, rev)

        tr = self._repo.currenttransaction()
        if tr:
            tr.addfinalize('write-revbranchcache', self.write)

    def write(self, tr=None):
        """Save branch cache if it is dirty."""
        repo = self._repo
        wlock = None
        step = ''
        try:
            if self._rbcnamescount < len(self._names):
                # new branch names need to be appended to rbc-names
                step = ' names'
                wlock = repo.wlock(wait=False)
                if self._rbcnamescount != 0:
                    f = repo.vfs.open(_rbcnames, 'ab')
                    if f.tell() == self._rbcsnameslen:
                        f.write('\0')
                    else:
                        # on-disk file diverged from what we read - rewrite
                        f.close()
                        repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
                        self._rbcnamescount = 0
                        self._rbcrevslen = 0
                if self._rbcnamescount == 0:
                    # before rewriting names, make sure references are removed
                    repo.vfs.unlinkpath(_rbcrevs, ignoremissing=True)
                    f = repo.vfs.open(_rbcnames, 'wb')
                f.write('\0'.join(encoding.fromlocal(b)
                                  for b in self._names[self._rbcnamescount:]))
                self._rbcsnameslen = f.tell()
                f.close()
                self._rbcnamescount = len(self._names)

            start = self._rbcrevslen * _rbcrecsize
            if start != len(self._rbcrevs):
                step = ''
                if wlock is None:
                    wlock = repo.wlock(wait=False)
                revs = min(len(repo.changelog),
                           len(self._rbcrevs) // _rbcrecsize)
                f = repo.vfs.open(_rbcrevs, 'ab')
                if f.tell() != start:
                    repo.ui.debug("truncating %s to %d\n" % (_rbcrevs, start))
                    f.seek(start)
                    if f.tell() != start:
                        start = 0
                        f.seek(start)
                    f.truncate()
                end = revs * _rbcrecsize
                f.write(self._rbcrevs[start:end])
                f.close()
                self._rbcrevslen = revs
        except (IOError, OSError, error.Abort, error.LockError) as inst:
            # writing the cache is best-effort; a contended lock or a
            # read-only repo must not abort the caller
            repo.ui.debug("couldn't write revision branch cache%s: %s\n"
                          % (step, inst))
        finally:
            if wlock is not None:
                wlock.release()
@@ -1,558 +1,556 b''
1 # extensions.py - extension handling for mercurial
1 # extensions.py - extension handling for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import imp
10 import imp
11 import inspect
11 import inspect
12 import os
12 import os
13
13
14 from .i18n import (
14 from .i18n import (
15 _,
15 _,
16 gettext,
16 gettext,
17 )
17 )
18
18
19 from . import (
19 from . import (
20 cmdutil,
20 cmdutil,
21 encoding,
21 encoding,
22 error,
22 error,
23 pycompat,
23 pycompat,
24 util,
24 util,
25 )
25 )
26
26
27 _extensions = {}
27 _extensions = {}
28 _disabledextensions = {}
28 _disabledextensions = {}
29 _aftercallbacks = {}
29 _aftercallbacks = {}
30 _order = []
30 _order = []
31 _builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
31 _builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
32 'inotify', 'hgcia'])
32 'inotify', 'hgcia'])
33
33
34 def extensions(ui=None):
34 def extensions(ui=None):
35 if ui:
35 if ui:
36 def enabled(name):
36 def enabled(name):
37 for format in ['%s', 'hgext.%s']:
37 for format in ['%s', 'hgext.%s']:
38 conf = ui.config('extensions', format % name)
38 conf = ui.config('extensions', format % name)
39 if conf is not None and not conf.startswith('!'):
39 if conf is not None and not conf.startswith('!'):
40 return True
40 return True
41 else:
41 else:
42 enabled = lambda name: True
42 enabled = lambda name: True
43 for name in _order:
43 for name in _order:
44 module = _extensions[name]
44 module = _extensions[name]
45 if module and enabled(name):
45 if module and enabled(name):
46 yield name, module
46 yield name, module
47
47
48 def find(name):
48 def find(name):
49 '''return module with given extension name'''
49 '''return module with given extension name'''
50 mod = None
50 mod = None
51 try:
51 try:
52 mod = _extensions[name]
52 mod = _extensions[name]
53 except KeyError:
53 except KeyError:
54 for k, v in _extensions.iteritems():
54 for k, v in _extensions.iteritems():
55 if k.endswith('.' + name) or k.endswith('/' + name):
55 if k.endswith('.' + name) or k.endswith('/' + name):
56 mod = v
56 mod = v
57 break
57 break
58 if not mod:
58 if not mod:
59 raise KeyError(name)
59 raise KeyError(name)
60 return mod
60 return mod
61
61
62 def loadpath(path, module_name):
62 def loadpath(path, module_name):
63 module_name = module_name.replace('.', '_')
63 module_name = module_name.replace('.', '_')
64 path = util.normpath(util.expandpath(path))
64 path = util.normpath(util.expandpath(path))
65 module_name = pycompat.fsdecode(module_name)
65 module_name = pycompat.fsdecode(module_name)
66 path = pycompat.fsdecode(path)
66 path = pycompat.fsdecode(path)
67 if os.path.isdir(path):
67 if os.path.isdir(path):
68 # module/__init__.py style
68 # module/__init__.py style
69 d, f = os.path.split(path)
69 d, f = os.path.split(path)
70 fd, fpath, desc = imp.find_module(f, [d])
70 fd, fpath, desc = imp.find_module(f, [d])
71 return imp.load_module(module_name, fd, fpath, desc)
71 return imp.load_module(module_name, fd, fpath, desc)
72 else:
72 else:
73 try:
73 try:
74 return imp.load_source(module_name, path)
74 return imp.load_source(module_name, path)
75 except IOError as exc:
75 except IOError as exc:
76 if not exc.filename:
76 if not exc.filename:
77 exc.filename = path # python does not fill this
77 exc.filename = path # python does not fill this
78 raise
78 raise
79
79
80 def _importh(name):
80 def _importh(name):
81 """import and return the <name> module"""
81 """import and return the <name> module"""
82 mod = __import__(pycompat.sysstr(name))
82 mod = __import__(pycompat.sysstr(name))
83 components = name.split('.')
83 components = name.split('.')
84 for comp in components[1:]:
84 for comp in components[1:]:
85 mod = getattr(mod, comp)
85 mod = getattr(mod, comp)
86 return mod
86 return mod
87
87
88 def _importext(name, path=None, reportfunc=None):
88 def _importext(name, path=None, reportfunc=None):
89 if path:
89 if path:
90 # the module will be loaded in sys.modules
90 # the module will be loaded in sys.modules
91 # choose an unique name so that it doesn't
91 # choose an unique name so that it doesn't
92 # conflicts with other modules
92 # conflicts with other modules
93 mod = loadpath(path, 'hgext.%s' % name)
93 mod = loadpath(path, 'hgext.%s' % name)
94 else:
94 else:
95 try:
95 try:
96 mod = _importh("hgext.%s" % name)
96 mod = _importh("hgext.%s" % name)
97 except ImportError as err:
97 except ImportError as err:
98 if reportfunc:
98 if reportfunc:
99 reportfunc(err, "hgext.%s" % name, "hgext3rd.%s" % name)
99 reportfunc(err, "hgext.%s" % name, "hgext3rd.%s" % name)
100 try:
100 try:
101 mod = _importh("hgext3rd.%s" % name)
101 mod = _importh("hgext3rd.%s" % name)
102 except ImportError as err:
102 except ImportError as err:
103 if reportfunc:
103 if reportfunc:
104 reportfunc(err, "hgext3rd.%s" % name, name)
104 reportfunc(err, "hgext3rd.%s" % name, name)
105 mod = _importh(name)
105 mod = _importh(name)
106 return mod
106 return mod
107
107
108 def _forbytes(inst):
108 def _forbytes(inst):
109 """Portably format an import error into a form suitable for
109 """Portably format an import error into a form suitable for
110 %-formatting into bytestrings."""
110 %-formatting into bytestrings."""
111 return encoding.strtolocal(str(inst))
111 return encoding.strtolocal(str(inst))
112
112
113 def _reportimporterror(ui, err, failed, next):
113 def _reportimporterror(ui, err, failed, next):
114 # note: this ui.debug happens before --debug is processed,
114 # note: this ui.debug happens before --debug is processed,
115 # Use --config ui.debug=1 to see them.
115 # Use --config ui.debug=1 to see them.
116 ui.debug('could not import %s (%s): trying %s\n'
116 ui.debug('could not import %s (%s): trying %s\n'
117 % (failed, _forbytes(err), next))
117 % (failed, _forbytes(err), next))
118 if ui.debugflag:
118 if ui.debugflag:
119 ui.traceback()
119 ui.traceback()
120
120
121 def load(ui, name, path):
121 def load(ui, name, path):
122 if name.startswith('hgext.') or name.startswith('hgext/'):
122 if name.startswith('hgext.') or name.startswith('hgext/'):
123 shortname = name[6:]
123 shortname = name[6:]
124 else:
124 else:
125 shortname = name
125 shortname = name
126 if shortname in _builtin:
126 if shortname in _builtin:
127 return None
127 return None
128 if shortname in _extensions:
128 if shortname in _extensions:
129 return _extensions[shortname]
129 return _extensions[shortname]
130 _extensions[shortname] = None
130 _extensions[shortname] = None
131 mod = _importext(name, path, bind(_reportimporterror, ui))
131 mod = _importext(name, path, bind(_reportimporterror, ui))
132
132
133 # Before we do anything with the extension, check against minimum stated
133 # Before we do anything with the extension, check against minimum stated
134 # compatibility. This gives extension authors a mechanism to have their
134 # compatibility. This gives extension authors a mechanism to have their
135 # extensions short circuit when loaded with a known incompatible version
135 # extensions short circuit when loaded with a known incompatible version
136 # of Mercurial.
136 # of Mercurial.
137 minver = getattr(mod, 'minimumhgversion', None)
137 minver = getattr(mod, 'minimumhgversion', None)
138 if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2):
138 if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2):
139 ui.warn(_('(third party extension %s requires version %s or newer '
139 ui.warn(_('(third party extension %s requires version %s or newer '
140 'of Mercurial; disabling)\n') % (shortname, minver))
140 'of Mercurial; disabling)\n') % (shortname, minver))
141 return
141 return
142
142
143 _extensions[shortname] = mod
143 _extensions[shortname] = mod
144 _order.append(shortname)
144 _order.append(shortname)
145 for fn in _aftercallbacks.get(shortname, []):
145 for fn in _aftercallbacks.get(shortname, []):
146 fn(loaded=True)
146 fn(loaded=True)
147 return mod
147 return mod
148
148
149 def _runuisetup(name, ui):
149 def _runuisetup(name, ui):
150 uisetup = getattr(_extensions[name], 'uisetup', None)
150 uisetup = getattr(_extensions[name], 'uisetup', None)
151 if uisetup:
151 if uisetup:
152 uisetup(ui)
152 uisetup(ui)
153
153
154 def _runextsetup(name, ui):
154 def _runextsetup(name, ui):
155 extsetup = getattr(_extensions[name], 'extsetup', None)
155 extsetup = getattr(_extensions[name], 'extsetup', None)
156 if extsetup:
156 if extsetup:
157 try:
157 try:
158 extsetup(ui)
158 extsetup(ui)
159 except TypeError:
159 except TypeError:
160 if inspect.getargspec(extsetup).args:
160 if inspect.getargspec(extsetup).args:
161 raise
161 raise
162 extsetup() # old extsetup with no ui argument
162 extsetup() # old extsetup with no ui argument
163
163
164 def loadall(ui):
164 def loadall(ui):
165 result = ui.configitems("extensions")
165 result = ui.configitems("extensions")
166 newindex = len(_order)
166 newindex = len(_order)
167 for (name, path) in result:
167 for (name, path) in result:
168 if path:
168 if path:
169 if path[0:1] == '!':
169 if path[0:1] == '!':
170 _disabledextensions[name] = path[1:]
170 _disabledextensions[name] = path[1:]
171 continue
171 continue
172 try:
172 try:
173 load(ui, name, path)
173 load(ui, name, path)
174 except KeyboardInterrupt:
175 raise
176 except Exception as inst:
174 except Exception as inst:
177 inst = _forbytes(inst)
175 inst = _forbytes(inst)
178 if path:
176 if path:
179 ui.warn(_("*** failed to import extension %s from %s: %s\n")
177 ui.warn(_("*** failed to import extension %s from %s: %s\n")
180 % (name, path, inst))
178 % (name, path, inst))
181 else:
179 else:
182 ui.warn(_("*** failed to import extension %s: %s\n")
180 ui.warn(_("*** failed to import extension %s: %s\n")
183 % (name, inst))
181 % (name, inst))
184 ui.traceback()
182 ui.traceback()
185
183
186 for name in _order[newindex:]:
184 for name in _order[newindex:]:
187 _runuisetup(name, ui)
185 _runuisetup(name, ui)
188
186
189 for name in _order[newindex:]:
187 for name in _order[newindex:]:
190 _runextsetup(name, ui)
188 _runextsetup(name, ui)
191
189
192 # Call aftercallbacks that were never met.
190 # Call aftercallbacks that were never met.
193 for shortname in _aftercallbacks:
191 for shortname in _aftercallbacks:
194 if shortname in _extensions:
192 if shortname in _extensions:
195 continue
193 continue
196
194
197 for fn in _aftercallbacks[shortname]:
195 for fn in _aftercallbacks[shortname]:
198 fn(loaded=False)
196 fn(loaded=False)
199
197
200 # loadall() is called multiple times and lingering _aftercallbacks
198 # loadall() is called multiple times and lingering _aftercallbacks
201 # entries could result in double execution. See issue4646.
199 # entries could result in double execution. See issue4646.
202 _aftercallbacks.clear()
200 _aftercallbacks.clear()
203
201
204 def afterloaded(extension, callback):
202 def afterloaded(extension, callback):
205 '''Run the specified function after a named extension is loaded.
203 '''Run the specified function after a named extension is loaded.
206
204
207 If the named extension is already loaded, the callback will be called
205 If the named extension is already loaded, the callback will be called
208 immediately.
206 immediately.
209
207
210 If the named extension never loads, the callback will be called after
208 If the named extension never loads, the callback will be called after
211 all extensions have been loaded.
209 all extensions have been loaded.
212
210
213 The callback receives the named argument ``loaded``, which is a boolean
211 The callback receives the named argument ``loaded``, which is a boolean
214 indicating whether the dependent extension actually loaded.
212 indicating whether the dependent extension actually loaded.
215 '''
213 '''
216
214
217 if extension in _extensions:
215 if extension in _extensions:
218 callback(loaded=True)
216 callback(loaded=True)
219 else:
217 else:
220 _aftercallbacks.setdefault(extension, []).append(callback)
218 _aftercallbacks.setdefault(extension, []).append(callback)
221
219
222 def bind(func, *args):
220 def bind(func, *args):
223 '''Partial function application
221 '''Partial function application
224
222
225 Returns a new function that is the partial application of args and kwargs
223 Returns a new function that is the partial application of args and kwargs
226 to func. For example,
224 to func. For example,
227
225
228 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
226 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
229 assert callable(func)
227 assert callable(func)
230 def closure(*a, **kw):
228 def closure(*a, **kw):
231 return func(*(args + a), **kw)
229 return func(*(args + a), **kw)
232 return closure
230 return closure
233
231
234 def _updatewrapper(wrap, origfn, unboundwrapper):
232 def _updatewrapper(wrap, origfn, unboundwrapper):
235 '''Copy and add some useful attributes to wrapper'''
233 '''Copy and add some useful attributes to wrapper'''
236 wrap.__module__ = getattr(origfn, '__module__')
234 wrap.__module__ = getattr(origfn, '__module__')
237 wrap.__doc__ = getattr(origfn, '__doc__')
235 wrap.__doc__ = getattr(origfn, '__doc__')
238 wrap.__dict__.update(getattr(origfn, '__dict__', {}))
236 wrap.__dict__.update(getattr(origfn, '__dict__', {}))
239 wrap._origfunc = origfn
237 wrap._origfunc = origfn
240 wrap._unboundwrapper = unboundwrapper
238 wrap._unboundwrapper = unboundwrapper
241
239
242 def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
240 def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
243 '''Wrap the command named `command' in table
241 '''Wrap the command named `command' in table
244
242
245 Replace command in the command table with wrapper. The wrapped command will
243 Replace command in the command table with wrapper. The wrapped command will
246 be inserted into the command table specified by the table argument.
244 be inserted into the command table specified by the table argument.
247
245
248 The wrapper will be called like
246 The wrapper will be called like
249
247
250 wrapper(orig, *args, **kwargs)
248 wrapper(orig, *args, **kwargs)
251
249
252 where orig is the original (wrapped) function, and *args, **kwargs
250 where orig is the original (wrapped) function, and *args, **kwargs
253 are the arguments passed to it.
251 are the arguments passed to it.
254
252
255 Optionally append to the command synopsis and docstring, used for help.
253 Optionally append to the command synopsis and docstring, used for help.
256 For example, if your extension wraps the ``bookmarks`` command to add the
254 For example, if your extension wraps the ``bookmarks`` command to add the
257 flags ``--remote`` and ``--all`` you might call this function like so:
255 flags ``--remote`` and ``--all`` you might call this function like so:
258
256
259 synopsis = ' [-a] [--remote]'
257 synopsis = ' [-a] [--remote]'
260 docstring = """
258 docstring = """
261
259
262 The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``)
260 The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``)
263 flags to the bookmarks command. Either flag will show the remote bookmarks
261 flags to the bookmarks command. Either flag will show the remote bookmarks
264 known to the repository; ``--remote`` will also suppress the output of the
262 known to the repository; ``--remote`` will also suppress the output of the
265 local bookmarks.
263 local bookmarks.
266 """
264 """
267
265
268 extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
266 extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
269 synopsis, docstring)
267 synopsis, docstring)
270 '''
268 '''
271 assert callable(wrapper)
269 assert callable(wrapper)
272 aliases, entry = cmdutil.findcmd(command, table)
270 aliases, entry = cmdutil.findcmd(command, table)
273 for alias, e in table.iteritems():
271 for alias, e in table.iteritems():
274 if e is entry:
272 if e is entry:
275 key = alias
273 key = alias
276 break
274 break
277
275
278 origfn = entry[0]
276 origfn = entry[0]
279 wrap = bind(util.checksignature(wrapper), util.checksignature(origfn))
277 wrap = bind(util.checksignature(wrapper), util.checksignature(origfn))
280 _updatewrapper(wrap, origfn, wrapper)
278 _updatewrapper(wrap, origfn, wrapper)
281 if docstring is not None:
279 if docstring is not None:
282 wrap.__doc__ += docstring
280 wrap.__doc__ += docstring
283
281
284 newentry = list(entry)
282 newentry = list(entry)
285 newentry[0] = wrap
283 newentry[0] = wrap
286 if synopsis is not None:
284 if synopsis is not None:
287 newentry[2] += synopsis
285 newentry[2] += synopsis
288 table[key] = tuple(newentry)
286 table[key] = tuple(newentry)
289 return entry
287 return entry
290
288
291 def wrapfunction(container, funcname, wrapper):
289 def wrapfunction(container, funcname, wrapper):
292 '''Wrap the function named funcname in container
290 '''Wrap the function named funcname in container
293
291
294 Replace the funcname member in the given container with the specified
292 Replace the funcname member in the given container with the specified
295 wrapper. The container is typically a module, class, or instance.
293 wrapper. The container is typically a module, class, or instance.
296
294
297 The wrapper will be called like
295 The wrapper will be called like
298
296
299 wrapper(orig, *args, **kwargs)
297 wrapper(orig, *args, **kwargs)
300
298
301 where orig is the original (wrapped) function, and *args, **kwargs
299 where orig is the original (wrapped) function, and *args, **kwargs
302 are the arguments passed to it.
300 are the arguments passed to it.
303
301
304 Wrapping methods of the repository object is not recommended since
302 Wrapping methods of the repository object is not recommended since
305 it conflicts with extensions that extend the repository by
303 it conflicts with extensions that extend the repository by
306 subclassing. All extensions that need to extend methods of
304 subclassing. All extensions that need to extend methods of
307 localrepository should use this subclassing trick: namely,
305 localrepository should use this subclassing trick: namely,
308 reposetup() should look like
306 reposetup() should look like
309
307
310 def reposetup(ui, repo):
308 def reposetup(ui, repo):
311 class myrepo(repo.__class__):
309 class myrepo(repo.__class__):
312 def whatever(self, *args, **kwargs):
310 def whatever(self, *args, **kwargs):
313 [...extension stuff...]
311 [...extension stuff...]
314 super(myrepo, self).whatever(*args, **kwargs)
312 super(myrepo, self).whatever(*args, **kwargs)
315 [...extension stuff...]
313 [...extension stuff...]
316
314
317 repo.__class__ = myrepo
315 repo.__class__ = myrepo
318
316
319 In general, combining wrapfunction() with subclassing does not
317 In general, combining wrapfunction() with subclassing does not
320 work. Since you cannot control what other extensions are loaded by
318 work. Since you cannot control what other extensions are loaded by
321 your end users, you should play nicely with others by using the
319 your end users, you should play nicely with others by using the
322 subclass trick.
320 subclass trick.
323 '''
321 '''
324 assert callable(wrapper)
322 assert callable(wrapper)
325
323
326 origfn = getattr(container, funcname)
324 origfn = getattr(container, funcname)
327 assert callable(origfn)
325 assert callable(origfn)
328 wrap = bind(wrapper, origfn)
326 wrap = bind(wrapper, origfn)
329 _updatewrapper(wrap, origfn, wrapper)
327 _updatewrapper(wrap, origfn, wrapper)
330 setattr(container, funcname, wrap)
328 setattr(container, funcname, wrap)
331 return origfn
329 return origfn
332
330
333 def unwrapfunction(container, funcname, wrapper=None):
331 def unwrapfunction(container, funcname, wrapper=None):
334 '''undo wrapfunction
332 '''undo wrapfunction
335
333
336 If wrappers is None, undo the last wrap. Otherwise removes the wrapper
334 If wrappers is None, undo the last wrap. Otherwise removes the wrapper
337 from the chain of wrappers.
335 from the chain of wrappers.
338
336
339 Return the removed wrapper.
337 Return the removed wrapper.
340 Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
338 Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
341 wrapper is not None but is not found in the wrapper chain.
339 wrapper is not None but is not found in the wrapper chain.
342 '''
340 '''
343 chain = getwrapperchain(container, funcname)
341 chain = getwrapperchain(container, funcname)
344 origfn = chain.pop()
342 origfn = chain.pop()
345 if wrapper is None:
343 if wrapper is None:
346 wrapper = chain[0]
344 wrapper = chain[0]
347 chain.remove(wrapper)
345 chain.remove(wrapper)
348 setattr(container, funcname, origfn)
346 setattr(container, funcname, origfn)
349 for w in reversed(chain):
347 for w in reversed(chain):
350 wrapfunction(container, funcname, w)
348 wrapfunction(container, funcname, w)
351 return wrapper
349 return wrapper
352
350
353 def getwrapperchain(container, funcname):
351 def getwrapperchain(container, funcname):
354 '''get a chain of wrappers of a function
352 '''get a chain of wrappers of a function
355
353
356 Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
354 Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
357
355
358 The wrapper functions are the ones passed to wrapfunction, whose first
356 The wrapper functions are the ones passed to wrapfunction, whose first
359 argument is origfunc.
357 argument is origfunc.
360 '''
358 '''
361 result = []
359 result = []
362 fn = getattr(container, funcname)
360 fn = getattr(container, funcname)
363 while fn:
361 while fn:
364 assert callable(fn)
362 assert callable(fn)
365 result.append(getattr(fn, '_unboundwrapper', fn))
363 result.append(getattr(fn, '_unboundwrapper', fn))
366 fn = getattr(fn, '_origfunc', None)
364 fn = getattr(fn, '_origfunc', None)
367 return result
365 return result
368
366
369 def _disabledpaths(strip_init=False):
367 def _disabledpaths(strip_init=False):
370 '''find paths of disabled extensions. returns a dict of {name: path}
368 '''find paths of disabled extensions. returns a dict of {name: path}
371 removes /__init__.py from packages if strip_init is True'''
369 removes /__init__.py from packages if strip_init is True'''
372 import hgext
370 import hgext
373 extpath = os.path.dirname(
371 extpath = os.path.dirname(
374 os.path.abspath(pycompat.fsencode(hgext.__file__)))
372 os.path.abspath(pycompat.fsencode(hgext.__file__)))
375 try: # might not be a filesystem path
373 try: # might not be a filesystem path
376 files = os.listdir(extpath)
374 files = os.listdir(extpath)
377 except OSError:
375 except OSError:
378 return {}
376 return {}
379
377
380 exts = {}
378 exts = {}
381 for e in files:
379 for e in files:
382 if e.endswith('.py'):
380 if e.endswith('.py'):
383 name = e.rsplit('.', 1)[0]
381 name = e.rsplit('.', 1)[0]
384 path = os.path.join(extpath, e)
382 path = os.path.join(extpath, e)
385 else:
383 else:
386 name = e
384 name = e
387 path = os.path.join(extpath, e, '__init__.py')
385 path = os.path.join(extpath, e, '__init__.py')
388 if not os.path.exists(path):
386 if not os.path.exists(path):
389 continue
387 continue
390 if strip_init:
388 if strip_init:
391 path = os.path.dirname(path)
389 path = os.path.dirname(path)
392 if name in exts or name in _order or name == '__init__':
390 if name in exts or name in _order or name == '__init__':
393 continue
391 continue
394 exts[name] = path
392 exts[name] = path
395 exts.update(_disabledextensions)
393 exts.update(_disabledextensions)
396 return exts
394 return exts
397
395
398 def _moduledoc(file):
396 def _moduledoc(file):
399 '''return the top-level python documentation for the given file
397 '''return the top-level python documentation for the given file
400
398
401 Loosely inspired by pydoc.source_synopsis(), but rewritten to
399 Loosely inspired by pydoc.source_synopsis(), but rewritten to
402 handle triple quotes and to return the whole text instead of just
400 handle triple quotes and to return the whole text instead of just
403 the synopsis'''
401 the synopsis'''
404 result = []
402 result = []
405
403
406 line = file.readline()
404 line = file.readline()
407 while line[:1] == '#' or not line.strip():
405 while line[:1] == '#' or not line.strip():
408 line = file.readline()
406 line = file.readline()
409 if not line:
407 if not line:
410 break
408 break
411
409
412 start = line[:3]
410 start = line[:3]
413 if start == '"""' or start == "'''":
411 if start == '"""' or start == "'''":
414 line = line[3:]
412 line = line[3:]
415 while line:
413 while line:
416 if line.rstrip().endswith(start):
414 if line.rstrip().endswith(start):
417 line = line.split(start)[0]
415 line = line.split(start)[0]
418 if line:
416 if line:
419 result.append(line)
417 result.append(line)
420 break
418 break
421 elif not line:
419 elif not line:
422 return None # unmatched delimiter
420 return None # unmatched delimiter
423 result.append(line)
421 result.append(line)
424 line = file.readline()
422 line = file.readline()
425 else:
423 else:
426 return None
424 return None
427
425
428 return ''.join(result)
426 return ''.join(result)
429
427
430 def _disabledhelp(path):
428 def _disabledhelp(path):
431 '''retrieve help synopsis of a disabled extension (without importing)'''
429 '''retrieve help synopsis of a disabled extension (without importing)'''
432 try:
430 try:
433 file = open(path)
431 file = open(path)
434 except IOError:
432 except IOError:
435 return
433 return
436 else:
434 else:
437 doc = _moduledoc(file)
435 doc = _moduledoc(file)
438 file.close()
436 file.close()
439
437
440 if doc: # extracting localized synopsis
438 if doc: # extracting localized synopsis
441 return gettext(doc)
439 return gettext(doc)
442 else:
440 else:
443 return _('(no help text available)')
441 return _('(no help text available)')
444
442
445 def disabled():
443 def disabled():
446 '''find disabled extensions from hgext. returns a dict of {name: desc}'''
444 '''find disabled extensions from hgext. returns a dict of {name: desc}'''
447 try:
445 try:
448 from hgext import __index__
446 from hgext import __index__
449 return dict((name, gettext(desc))
447 return dict((name, gettext(desc))
450 for name, desc in __index__.docs.iteritems()
448 for name, desc in __index__.docs.iteritems()
451 if name not in _order)
449 if name not in _order)
452 except (ImportError, AttributeError):
450 except (ImportError, AttributeError):
453 pass
451 pass
454
452
455 paths = _disabledpaths()
453 paths = _disabledpaths()
456 if not paths:
454 if not paths:
457 return {}
455 return {}
458
456
459 exts = {}
457 exts = {}
460 for name, path in paths.iteritems():
458 for name, path in paths.iteritems():
461 doc = _disabledhelp(path)
459 doc = _disabledhelp(path)
462 if doc:
460 if doc:
463 exts[name] = doc.splitlines()[0]
461 exts[name] = doc.splitlines()[0]
464
462
465 return exts
463 return exts
466
464
467 def disabledext(name):
465 def disabledext(name):
468 '''find a specific disabled extension from hgext. returns desc'''
466 '''find a specific disabled extension from hgext. returns desc'''
469 try:
467 try:
470 from hgext import __index__
468 from hgext import __index__
471 if name in _order: # enabled
469 if name in _order: # enabled
472 return
470 return
473 else:
471 else:
474 return gettext(__index__.docs.get(name))
472 return gettext(__index__.docs.get(name))
475 except (ImportError, AttributeError):
473 except (ImportError, AttributeError):
476 pass
474 pass
477
475
478 paths = _disabledpaths()
476 paths = _disabledpaths()
479 if name in paths:
477 if name in paths:
480 return _disabledhelp(paths[name])
478 return _disabledhelp(paths[name])
481
479
482 def disabledcmd(ui, cmd, strict=False):
480 def disabledcmd(ui, cmd, strict=False):
483 '''import disabled extensions until cmd is found.
481 '''import disabled extensions until cmd is found.
484 returns (cmdname, extname, module)'''
482 returns (cmdname, extname, module)'''
485
483
486 paths = _disabledpaths(strip_init=True)
484 paths = _disabledpaths(strip_init=True)
487 if not paths:
485 if not paths:
488 raise error.UnknownCommand(cmd)
486 raise error.UnknownCommand(cmd)
489
487
490 def findcmd(cmd, name, path):
488 def findcmd(cmd, name, path):
491 try:
489 try:
492 mod = loadpath(path, 'hgext.%s' % name)
490 mod = loadpath(path, 'hgext.%s' % name)
493 except Exception:
491 except Exception:
494 return
492 return
495 try:
493 try:
496 aliases, entry = cmdutil.findcmd(cmd,
494 aliases, entry = cmdutil.findcmd(cmd,
497 getattr(mod, 'cmdtable', {}), strict)
495 getattr(mod, 'cmdtable', {}), strict)
498 except (error.AmbiguousCommand, error.UnknownCommand):
496 except (error.AmbiguousCommand, error.UnknownCommand):
499 return
497 return
500 except Exception:
498 except Exception:
501 ui.warn(_('warning: error finding commands in %s\n') % path)
499 ui.warn(_('warning: error finding commands in %s\n') % path)
502 ui.traceback()
500 ui.traceback()
503 return
501 return
504 for c in aliases:
502 for c in aliases:
505 if c.startswith(cmd):
503 if c.startswith(cmd):
506 cmd = c
504 cmd = c
507 break
505 break
508 else:
506 else:
509 cmd = aliases[0]
507 cmd = aliases[0]
510 return (cmd, name, mod)
508 return (cmd, name, mod)
511
509
512 ext = None
510 ext = None
513 # first, search for an extension with the same name as the command
511 # first, search for an extension with the same name as the command
514 path = paths.pop(cmd, None)
512 path = paths.pop(cmd, None)
515 if path:
513 if path:
516 ext = findcmd(cmd, cmd, path)
514 ext = findcmd(cmd, cmd, path)
517 if not ext:
515 if not ext:
518 # otherwise, interrogate each extension until there's a match
516 # otherwise, interrogate each extension until there's a match
519 for name, path in paths.iteritems():
517 for name, path in paths.iteritems():
520 ext = findcmd(cmd, name, path)
518 ext = findcmd(cmd, name, path)
521 if ext:
519 if ext:
522 break
520 break
523 if ext and 'DEPRECATED' not in ext.__doc__:
521 if ext and 'DEPRECATED' not in ext.__doc__:
524 return ext
522 return ext
525
523
526 raise error.UnknownCommand(cmd)
524 raise error.UnknownCommand(cmd)
527
525
528 def enabled(shortname=True):
526 def enabled(shortname=True):
529 '''return a dict of {name: desc} of extensions'''
527 '''return a dict of {name: desc} of extensions'''
530 exts = {}
528 exts = {}
531 for ename, ext in extensions():
529 for ename, ext in extensions():
532 doc = (gettext(ext.__doc__) or _('(no help text available)'))
530 doc = (gettext(ext.__doc__) or _('(no help text available)'))
533 if shortname:
531 if shortname:
534 ename = ename.split('.')[-1]
532 ename = ename.split('.')[-1]
535 exts[ename] = doc.splitlines()[0].strip()
533 exts[ename] = doc.splitlines()[0].strip()
536
534
537 return exts
535 return exts
538
536
539 def notloaded():
537 def notloaded():
540 '''return short names of extensions that failed to load'''
538 '''return short names of extensions that failed to load'''
541 return [name for name, mod in _extensions.iteritems() if mod is None]
539 return [name for name, mod in _extensions.iteritems() if mod is None]
542
540
543 def moduleversion(module):
541 def moduleversion(module):
544 '''return version information from given module as a string'''
542 '''return version information from given module as a string'''
545 if (util.safehasattr(module, 'getversion')
543 if (util.safehasattr(module, 'getversion')
546 and callable(module.getversion)):
544 and callable(module.getversion)):
547 version = module.getversion()
545 version = module.getversion()
548 elif util.safehasattr(module, '__version__'):
546 elif util.safehasattr(module, '__version__'):
549 version = module.__version__
547 version = module.__version__
550 else:
548 else:
551 version = ''
549 version = ''
552 if isinstance(version, (list, tuple)):
550 if isinstance(version, (list, tuple)):
553 version = '.'.join(str(o) for o in version)
551 version = '.'.join(str(o) for o in version)
554 return version
552 return version
555
553
556 def ismoduleinternal(module):
554 def ismoduleinternal(module):
557 exttestedwith = getattr(module, 'testedwith', None)
555 exttestedwith = getattr(module, 'testedwith', None)
558 return exttestedwith == "ships-with-hg-core"
556 return exttestedwith == "ships-with-hg-core"
General Comments 0
You need to be logged in to leave comments. Login now