@@ -1,105 +1,107 @@
 #!/bin/bash
 #
 # produces two repositories with different common and missing subsets
 #
 #  $ discovery-helper.sh REPO NBHEADS DEPTH
 #
 # The goal is to produce two repositories with some common part and some
 # exclusive part on each side. Given a source repository REPO, it will
 # produce two repositories with "-left" and "-right" suffixes.
 #
 # Each repository will be missing some revisions exclusive to NBHEADS of the
 # repo topological heads. These heads and revisions exclusive to them (up to
 # DEPTH depth) are stripped.
 #
 # The "left" repository will use the NBHEADS first heads (sorted by
 # description). The "right" one will use the last NBHEADS heads.
 #
 # To find out how many topological heads a repo has, use:
 #
 #  $ hg heads -t -T '{rev}\n' | wc -l
 #
 # Example:
 #
 # The `pypy-2018-09-01` repository has 192 heads. To produce two repositories
 # with 92 common heads (192 - 2*50) and ~50 exclusive heads on each side:
 #
 #  $ ./discovery-helper.sh pypy-2018-09-01 50 10

 set -euo pipefail

 printusage () {
     echo "usage: `basename $0` REPO NBHEADS DEPTH [left|right]" >&2
 }

 if [ $# -lt 3 ]; then
     printusage
     exit 64
 fi

 repo="$1"
 shift

 nbheads="$1"
 shift

 depth="$1"
 shift

 doleft=1
 doright=1
 if [ $# -gt 1 ]; then
     printusage
     exit 64
 elif [ $# -eq 1 ]; then
     if [ "$1" == "left" ]; then
         doleft=1
         doright=0
     elif [ "$1" == "right" ]; then
         doleft=0
         doright=1
     else
         printusage
         exit 64
     fi
 fi

 leftrepo="${repo}-${nbheads}h-${depth}d-left"
 rightrepo="${repo}-${nbheads}h-${depth}d-right"

 left="first(sort(heads(all()), 'desc'), $nbheads)"
 right="last(sort(heads(all()), 'desc'), $nbheads)"

 leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))"
 rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))"

 echo '### creating left/right repositories with missing changesets:'
 if [ $doleft -eq 1 ]; then
     echo '# left revset:' '"'${leftsubset}'"'
 fi
 if [ $doright -eq 1 ]; then
     echo '# right revset:' '"'${rightsubset}'"'
 fi

 buildone() {
     side="$1"
     dest="$2"
     revset="$3"
     echo "### building $side repository: $dest"
     if [ -e "$dest" ]; then
         echo "destination repo already exists: $dest" >&2
         exit 1
     fi
     echo '# cloning'
-    hg clone --noupdate "${repo}" "${dest}"
+    if ! cp --recursive --reflink=always "${repo}" "${dest}"; then
+        hg clone --noupdate "${repo}" "${dest}"
+    fi
     echo '# stripping' '"'${revset}'"'
     hg -R "${dest}" --config extensions.strip= strip --rev "$revset" --no-backup
 }

 if [ $doleft -eq 1 ]; then
     buildone left "$leftrepo" "$leftsubset"
 fi

 if [ $doright -eq 1 ]; then
     buildone right "$rightrepo" "$rightsubset"
 fi
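
The change above swaps the unconditional `hg clone --noupdate` for a copy-on-write copy attempt: on filesystems with reflink support (btrfs, or XFS with reflinks enabled), `cp --reflink=always` shares data extents with the source and is far cheaper than a full local clone of a large repository; on filesystems without that support the cp invocation fails and the script falls back to the clone. A minimal, illustrative probe for reflink support in the working directory (the probe file names are made up for this sketch, not part of the change):

  touch reflink-probe
  if cp --reflink=always reflink-probe reflink-probe.copy 2>/dev/null; then
      echo "reflink supported: the fast cp path will be used"
  else
      echo "no reflink support: discovery-helper.sh will fall back to hg clone"
  fi
  rm -f reflink-probe reflink-probe.copy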
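
For a complete picture of how the helper is meant to be used end to end, here is a hedged sketch based on the example in the header comment; the produced repository names follow the `${repo}-${nbheads}h-${depth}d-{left,right}` pattern from the script, and the final `hg debugdiscovery` call is only one possible way to exercise discovery between the two halves, not something this change prescribes:

  # count the topological heads of the source repository
  $ hg -R pypy-2018-09-01 heads -t -T '{rev}\n' | wc -l

  # build both partial repositories: 50 stripped heads per side, depth 10
  $ ./discovery-helper.sh pypy-2018-09-01 50 10

  # run the discovery algorithm between the two resulting repositories
  $ hg -R pypy-2018-09-01-50h-10d-left debugdiscovery pypy-2018-09-01-50h-10d-right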