@@ -1,68 +1,98 @@
 #!/bin/bash
 #
 # produces two repositories with different common and missing subsets
 #
 # $ discovery-helper.sh REPO NBHEADS DEPTH
 #
 # The goal is to produce two repositories with some common part and some
 # exclusive part on each side. Given a source repository REPO, it will
 # produce two repositories, REPO-left and REPO-right.
 #
 # Each repository will be missing some revisions exclusive to NBHEADS of the
 # repo topological heads. These heads and revisions exclusive to them (up to
 # DEPTH depth) are stripped.
 #
 # The "left" repository will use the first NBHEADS heads (sorted by
 # description). The "right" one will use the last NBHEADS heads.
 #
 # To find out how many topological heads a repo has, use:
 #
 #     $ hg heads -t -T '{rev}\n' | wc -l
 #
 # Example:
 #
 # The `pypy-2018-09-01` repository has 192 heads. To produce two repositories
 # with 92 common heads and ~50 exclusive heads on each side:
 #
 #     $ ./discovery-helper.sh pypy-2018-09-01 50 10
 
 set -euo pipefail
 
+printusage () {
+    echo "usage: `basename $0` REPO NBHEADS DEPTH [left|right]" >&2
+}
+
 if [ $# -lt 3 ]; then
-    echo "usage: `basename $0` REPO NBHEADS DEPTH"
+    printusage
     exit 64
 fi
 
 repo="$1"
 shift
 
 nbheads="$1"
 shift
 
 depth="$1"
 shift
 
+doleft=1
+doright=1
+if [ $# -gt 1 ]; then
+    printusage
+    exit 64
+elif [ $# -eq 1 ]; then
+    if [ "$1" == "left" ]; then
+        doleft=1
+        doright=0
+    elif [ "$1" == "right" ]; then
+        doleft=0
+        doright=1
+    else
+        printusage
+        exit 64
+    fi
+fi
+
 leftrepo="${repo}-${nbheads}h-${depth}d-left"
 rightrepo="${repo}-${nbheads}h-${depth}d-right"
 
 left="first(sort(heads(all()), 'desc'), $nbheads)"
 right="last(sort(heads(all()), 'desc'), $nbheads)"
 
 leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))"
 rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))"
 
 echo '### creating left/right repositories with missing changesets:'
-echo '# left revset:' '"'${leftsubset}'"'
-echo '# right revset:' '"'${rightsubset}'"'
+if [ $doleft -eq 1 ]; then
+    echo '# left revset:' '"'${leftsubset}'"'
+fi
+if [ $doright -eq 1 ]; then
+    echo '# right revset:' '"'${rightsubset}'"'
+fi
 
-echo '### building left repository:' $left-repo
-echo '# cloning'
-hg clone --noupdate "${repo}" "${leftrepo}"
-echo '# stripping' '"'${leftsubset}'"'
-hg -R "${leftrepo}" --config extensions.strip= strip --rev "$leftsubset" --no-backup
+if [ $doleft -eq 1 ]; then
+    echo '### building left repository:' "${leftrepo}"
+    echo '# cloning'
+    hg clone --noupdate "${repo}" "${leftrepo}"
+    echo '# stripping' '"'${leftsubset}'"'
+    hg -R "${leftrepo}" --config extensions.strip= strip --rev "$leftsubset" --no-backup
+fi
 
-echo '### building right repository:' $right-repo
-echo '# cloning'
-hg clone --noupdate "${repo}" "${rightrepo}"
-echo '# stripping:' '"'${rightsubset}'"'
-hg -R "${rightrepo}" --config extensions.strip= strip --rev "$rightsubset" --no-backup
+if [ $doright -eq 1 ]; then
+    echo '### building right repository:' "${rightrepo}"
+    echo '# cloning'
+    hg clone --noupdate "${repo}" "${rightrepo}"
+    echo '# stripping:' '"'${rightsubset}'"'
+    hg -R "${rightrepo}" --config extensions.strip= strip --rev "$rightsubset" --no-backup
+fi
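With this change the helper accepts an optional fourth argument restricting the build to one side. An illustrative invocation, reusing the repository name and sizes from the header example (not output from a real run):

    $ ./discovery-helper.sh pypy-2018-09-01 50 10 left

This would create only pypy-2018-09-01-50h-10d-left next to the source repository; passing `right` instead builds only the right one, and omitting the argument keeps the old behaviour of building both.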
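The strip revsets can also be previewed without building anything, to sanity-check how many revisions each side would lose. A minimal sketch, assuming NBHEADS=50, DEPTH=10 and the same source repository as above (the repository name is illustrative, not part of the change):

    $ left="first(sort(heads(all()), 'desc'), 50)"
    $ hg -R pypy-2018-09-01 log \
    >     -r "ancestors($left, 10) and only($left, heads(all() - $left))" \
    >     -T '{rev}\n' | wc -l

Here `sort(..., 'desc')` orders the topological heads by description, `first(..., 50)` picks the candidate heads for the left side, `only()` keeps the revisions reachable from those heads and from no other head, and the intersection with `ancestors(..., 10)` limits the stripped set to DEPTH ancestors, exactly as in the script.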