@@ -1,3 +1,4 @@
+# coding: utf8
 # copies.py - copy detection for Mercurial
 #
 # Copyright 2008 Matt Mackall <mpm@selenic.com>
@@ -351,14 +352,21 @@ def _combine_changeset_copies(
     isancestor = cached_is_ancestor(isancestor)
 
     all_copies = {}
+    # iterate over all the "parent" side of copy tracing "edge"
     for r in revs:
+        # fetch potential previously computed data for that parent
        copies = all_copies.pop(r, None)
         if copies is None:
             # this is a root
             copies = {}
+
+        # iterate over all known children to chain the existing data with the
+        # data from the parent → child edge.
         for i, c in enumerate(children[r]):
             p1, p2, changes = revinfo(c)
             childcopies = {}
+
+            # select the right parent → child edge
             if r == p1:
                 parent = 1
                 if changes is not None:
@@ -372,6 +380,8 @@ def _combine_changeset_copies(
                 childcopies = {
                     dst: src for dst, src in childcopies.items() if match(dst)
                 }
+
+            # chain the data in the edge with the existing data
             newcopies = copies
             if childcopies:
                 newcopies = copies.copy()
@@ -392,6 +402,9 @@ def _combine_changeset_copies(
                     # could be avoided.
                     newcopies = copies.copy()
                     newcopies[f] = (c, None)
+
+            # check potential need to combine the data from another parent (for
+            # that child). See comment below for details.
             othercopies = all_copies.get(c)
             if othercopies is None:
                 all_copies[c] = newcopies
@@ -418,6 +431,7 @@ def _combine_changeset_copies(
                 copies = _merge_copies_dict(minor, major, isancestor, changes)
                 all_copies[c] = copies
 
+    # filter out internal details and return a {dest: source mapping}
     final_copies = {}
     for dest, (tt, source) in all_copies[targetrev].items():
         if source is not None:
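The hunks above mostly add explanatory comments to _combine_changeset_copies(); the `# coding: utf8` declaration accompanies them, presumably because the new comments use the non-ASCII "→" character. As a rough illustration of the traversal those comments describe, here is a minimal standalone sketch: copy data is accumulated per revision and chained along each parent → child edge, then combined when a child has two parents. The toy graph, the edge_copies input and the simplified "last parent wins" merge rule are assumptions made for this sketch only; the real function also records the revision that introduced each copy, filters destinations through a matcher and resolves merges with _merge_copies_dict() using ancestry information.

def combine_copies_sketch(revs, children, edge_copies, target):
    """Return the accumulated {dest: source} mapping for `target`.

    revs        -- revisions in topological order (parents before children)
    children    -- {rev: [child revisions]}
    edge_copies -- {(parent, child): {dest: source}} copies made on an edge
    """
    all_copies = {}
    # iterate over the "parent" side of every copy tracing "edge"
    for r in revs:
        # fetch potential previously computed data for that parent
        copies = all_copies.pop(r, {})
        # chain the existing data with the data from each parent -> child edge
        for c in children.get(r, ()):
            childcopies = edge_copies.get((r, c), {})
            newcopies = copies
            if childcopies:
                newcopies = copies.copy()
                for dest, source in childcopies.items():
                    # if the source was itself copied earlier, chain back
                    # to the original source
                    newcopies[dest] = copies.get(source, source)
            other = all_copies.get(c)
            if other is None:
                all_copies[c] = newcopies
            else:
                # `c` is a merge: combine the data from both parents.  The
                # real code applies ancestry-based rules here; this sketch
                # simply lets the later-processed parent win on conflicts.
                merged = dict(other)
                merged.update(newcopies)
                all_copies[c] = merged
    return all_copies.get(target, {})


# toy history 0 -> 1 -> 2: "a" is copied to "b" on edge 0->1 and "b" to "c"
# on edge 1->2; chaining reports both destinations as coming from "a".
print(combine_copies_sketch(
    revs=[0, 1, 2],
    children={0: [1], 1: [2]},
    edge_copies={(0, 1): {"b": "a"}, (1, 2): {"c": "b"}},
    target=2,
))  # {'b': 'a', 'c': 'a'}

Running the sketch on the toy three-revision history shows the chaining behaviour the comments refer to: a copy whose source was itself copied on an earlier edge is reported against the original source rather than the intermediate name.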