@@ -341,15 +341,27 b' def _readtagcache(ui, repo):'
         # potentially expensive search.
         return ([], {}, valid, None, True)
 
-    starttime = util.timer()
 
     # Now we have to lookup the .hgtags filenode for every new head.
     # This is the most expensive part of finding tags, so performance
     # depends primarily on the size of newheads. Worst case: no cache
     # file, so newheads == repoheads.
+    cachefnode = _getfnodes(ui, repo, repoheads)
+
+    # Caller has to iterate over all heads, but can use the filenodes in
+    # cachefnode to get to each .hgtags revision quickly.
+    return (repoheads, cachefnode, valid, None, True)
+
+def _getfnodes(ui, repo, nodes):
+    """return .hgtags fnodes for a list of changeset nodes
+
+    Return value is a {node: fnode} mapping. There will be no entry for nodes
+    without a '.hgtags' file.
+    """
+    starttime = util.timer()
     fnodescache = hgtagsfnodescache(repo.unfiltered())
     cachefnode = {}
-    for head in reversed(repoheads):
+    for head in reversed(nodes):
         fnode = fnodescache.getfnode(head)
         if fnode != nullid:
             cachefnode[head] = fnode
@@ -361,10 +373,7 b' def _readtagcache(ui, repo):'
           '%d/%d cache hits/lookups in %0.4f '
           'seconds\n',
           fnodescache.hitcount, fnodescache.lookupcount, duration)
-
-    # Caller has to iterate over all heads, but can use the filenodes in
-    # cachefnode to get to each .hgtags revision quickly.
-    return (repoheads, cachefnode, valid, None, True)
+    return cachefnode
 
 def _writetagcache(ui, repo, valid, cachetags):
     filename = _filename(repo)
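
Note for readers skimming the review: the point of the extraction is that the {node: fnode} lookup documented in the new docstring is now available to any caller, not just _readtagcache, while the timing and logging stay inside the helper. The following is a minimal, self-contained sketch of that contract; FakeFnodesCache, NULLID, and getfnodes_sketch are hypothetical stand-ins for hgtagsfnodescache, nullid, and _getfnodes from the patch above, not Mercurial's actual API.

    # Hypothetical sketch (not Mercurial's API): illustrates the
    # {node: fnode} contract of the extracted helper.

    NULLID = b"\x00" * 20  # assumed 20-byte null node id, mirroring nullid

    class FakeFnodesCache(object):
        """Stand-in cache mapping changeset node -> .hgtags filenode."""
        def __init__(self, known):
            self._known = known
            self.lookupcount = 0
            self.hitcount = 0

        def getfnode(self, node):
            self.lookupcount += 1
            fnode = self._known.get(node, NULLID)
            if fnode != NULLID:
                self.hitcount += 1
            return fnode

    def getfnodes_sketch(cache, nodes):
        """Mirror of the extracted loop: build {node: fnode}, skipping
        nodes whose .hgtags filenode is the null id (no .hgtags file)."""
        cachefnode = {}
        for head in reversed(nodes):
            fnode = cache.getfnode(head)
            if fnode != NULLID:
                cachefnode[head] = fnode
        return cachefnode

    heads = [b"a" * 20, b"b" * 20, b"c" * 20]
    cache = FakeFnodesCache({b"a" * 20: b"f" * 20})  # only one head carries .hgtags
    mapping = getfnodes_sketch(cache, heads)
    assert mapping == {b"a" * 20: b"f" * 20}
    assert (cache.hitcount, cache.lookupcount) == (1, 3)

Returning a plain dict keeps nodes without a .hgtags file out of the result entirely, so callers such as _readtagcache can pass the mapping straight through without filtering null entries themselves.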