@@ -1,551 +1,566 @@
 # linux.py - Linux specific automation functionality
 #
 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 # no-check-code because Python 3 native.

 import os
 import pathlib
 import shlex
 import subprocess
 import tempfile

 from .ssh import (
     exec_command,
 )


 # Linux distributions that are supported.
 DISTROS = {
     'debian9',
     'ubuntu18.04',
     'ubuntu18.10',
     'ubuntu19.04',
 }

 INSTALL_PYTHONS = r'''
 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
 PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"

 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
 pushd /hgdev/pyenv
 git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
 popd

 export PYENV_ROOT="/hgdev/pyenv"
 export PATH="$PYENV_ROOT/bin:$PATH"

 # pip 19.0.3.
 PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -

 VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
 VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -

 for v in ${PYENV2_VERSIONS}; do
     pyenv install -v ${v}
     ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
     ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
     ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
 done

 for v in ${PYENV3_VERSIONS}; do
     pyenv install -v ${v}
     ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
     ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
 done

 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
 '''.lstrip().replace('\r\n', '\n')


+INSTALL_RUST = r'''
+RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
+wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
+echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
+
+chmod +x rustup-init
+sudo -H -u hg -g hg ./rustup-init -y
+sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
+sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
+'''
+
+
 BOOTSTRAP_VIRTUALENV = r'''
 /usr/bin/virtualenv /hgdev/venv-bootstrap

 HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
 HG_TARBALL=mercurial-4.9.1.tar.gz

 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -

 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
 '''.lstrip().replace('\r\n', '\n')


 BOOTSTRAP_DEBIAN = r'''
 #!/bin/bash

 set -ex

 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
 DEBIAN_VERSION=`cat /etc/debian_version`
 LSB_RELEASE=`lsb_release -cs`

 sudo /usr/sbin/groupadd hg
 sudo /usr/sbin/groupadd docker
 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
 sudo mkdir /home/hg/.ssh
 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
 sudo chown -R hg:hg /home/hg/.ssh
 sudo chmod 700 /home/hg/.ssh
 sudo chmod 600 /home/hg/.ssh/authorized_keys

 cat << EOF | sudo tee /etc/sudoers.d/90-hg
 hg ALL=(ALL) NOPASSWD:ALL
 EOF

 sudo apt-get update
 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade

 # Install packages necessary to set up Docker Apt repo.
 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
     apt-transport-https \
     gnupg

 cat > docker-apt-key << EOF
 -----BEGIN PGP PUBLIC KEY BLOCK-----

 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
 =0YYh
 -----END PGP PUBLIC KEY BLOCK-----
 EOF

 sudo apt-key add docker-apt-key

 if [ "$DEBIAN_VERSION" = "9.8" ]; then
 cat << EOF | sudo tee -a /etc/apt/sources.list
 # Need backports for clang-format-6.0
 deb http://deb.debian.org/debian stretch-backports main

 # Sources are useful if we want to compile things locally.
 deb-src http://deb.debian.org/debian stretch main
 deb-src http://security.debian.org/debian-security stretch/updates main
 deb-src http://deb.debian.org/debian stretch-updates main
 deb-src http://deb.debian.org/debian stretch-backports main

 deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
 EOF

 elif [ "$DISTRO" = "Ubuntu" ]; then
 cat << EOF | sudo tee -a /etc/apt/sources.list
 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
 EOF

 fi

 sudo apt-get update

 PACKAGES="\
     btrfs-progs \
     build-essential \
     bzr \
     clang-format-6.0 \
     cvs \
     darcs \
     debhelper \
     devscripts \
     dpkg-dev \
     dstat \
     emacs \
     gettext \
     git \
     htop \
     iotop \
     jfsutils \
     libbz2-dev \
     libexpat1-dev \
     libffi-dev \
     libgdbm-dev \
     liblzma-dev \
     libncurses5-dev \
     libnss3-dev \
     libreadline-dev \
     libsqlite3-dev \
     libssl-dev \
     netbase \
     ntfs-3g \
     nvme-cli \
     pyflakes \
     pyflakes3 \
     pylint \
     pylint3 \
     python-all-dev \
     python-dev \
     python-docutils \
     python-fuzzywuzzy \
     python-pygments \
     python-subversion \
     python-vcr \
     python3-dev \
     python3-docutils \
     python3-fuzzywuzzy \
     python3-pygments \
     python3-vcr \
     rsync \
     sqlite3 \
     subversion \
     tcl-dev \
     tk-dev \
     tla \
     unzip \
     uuid-dev \
     vim \
     virtualenv \
     wget \
     xfsprogs \
     zip \
     zlib1g-dev"

 if [ "$DEBIAN_VERSION" = "9.8" ]; then
     PACKAGES="$PACKAGES linux-perf"
 elif [ "$DISTRO" = "Ubuntu" ]; then
     PACKAGES="$PACKAGES linux-tools-common"
 fi

 # Ubuntu 19.04 removes monotone.
 if [ "$LSB_RELEASE" != "disco" ]; then
     PACKAGES="$PACKAGES monotone"
 fi

 # As of April 27, 2019, Docker hasn't published packages for
 # Ubuntu 19.04 yet.
 if [ "$LSB_RELEASE" != "disco" ]; then
     PACKAGES="$PACKAGES docker-ce"
 fi

 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES

 # Create clang-format symlink so test harness finds it.
 sudo update-alternatives --install /usr/bin/clang-format clang-format \
     /usr/bin/clang-format-6.0 1000

 sudo mkdir /hgdev
 # Will be normalized to hg:hg later.
 sudo chown `whoami` /hgdev

+{install_rust}
+
 cp requirements-py2.txt /hgdev/requirements-py2.txt
 cp requirements-py3.txt /hgdev/requirements-py3.txt

 # Disable the pip version check because it uses the network and can
 # be annoying.
 cat << EOF | sudo tee -a /etc/pip.conf
 [global]
 disable-pip-version-check = True
 EOF

 {install_pythons}
 {bootstrap_virtualenv}

 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src

 # Mark the repo as non-publishing.
 cat >> /hgdev/src/.hg/hgrc << EOF
 [phases]
 publish = false
 EOF

 sudo chown -R hg:hg /hgdev
 '''.lstrip().format(
+    install_rust=INSTALL_RUST,
     install_pythons=INSTALL_PYTHONS,
     bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
 ).replace('\r\n', '\n')


 # Prepares /hgdev for operations.
 PREPARE_HGDEV = '''
 #!/bin/bash

 set -e

 FS=$1

 ensure_device() {
     if [ -z "${DEVICE}" ]; then
         echo "could not find block device to format"
         exit 1
     fi
 }

 # Determine device to partition for extra filesystem.
 # If only 1 volume is present, it will be the root volume and
 # should be /dev/nvme0. If multiple volumes are present, the
 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
 # a partition.
 if [ -e /dev/nvme1n1 ]; then
     if [ -e /dev/nvme0n1p1 ]; then
         DEVICE=/dev/nvme1n1
     else
         DEVICE=/dev/nvme0n1
     fi
 else
     DEVICE=
 fi

 sudo mkdir /hgwork

 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
     ensure_device
     echo "creating ${FS} filesystem on ${DEVICE}"
 fi

 if [ "${FS}" = "default" ]; then
     :

 elif [ "${FS}" = "btrfs" ]; then
     sudo mkfs.btrfs ${DEVICE}
     sudo mount ${DEVICE} /hgwork

 elif [ "${FS}" = "ext3" ]; then
     # lazy_journal_init speeds up filesystem creation at the expense of
     # integrity if things crash. We are an ephemeral instance, so we don't
     # care about integrity.
     sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
     sudo mount ${DEVICE} /hgwork

 elif [ "${FS}" = "ext4" ]; then
     sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
     sudo mount ${DEVICE} /hgwork

 elif [ "${FS}" = "jfs" ]; then
     sudo mkfs.jfs ${DEVICE}
     sudo mount ${DEVICE} /hgwork

 elif [ "${FS}" = "tmpfs" ]; then
     echo "creating tmpfs volume in /hgwork"
     sudo mount -t tmpfs -o size=1024M tmpfs /hgwork

 elif [ "${FS}" = "xfs" ]; then
     sudo mkfs.xfs ${DEVICE}
     sudo mount ${DEVICE} /hgwork

 else
     echo "unsupported filesystem: ${FS}"
     exit 1
 fi

 echo "/hgwork ready"

 sudo chown hg:hg /hgwork
 mkdir /hgwork/tmp
 chown hg:hg /hgwork/tmp

 rsync -a /hgdev/src /hgwork/
 '''.lstrip().replace('\r\n', '\n')


 HG_UPDATE_CLEAN = '''
 set -ex

 HG=/hgdev/venv-bootstrap/bin/hg

 cd /hgwork/src
 ${HG} --config extensions.purge= purge --all
 ${HG} update -C $1
 ${HG} log -r .
 '''.lstrip().replace('\r\n', '\n')


 def prepare_exec_environment(ssh_client, filesystem='default'):
     """Prepare an EC2 instance to execute things.

     The AMI has an ``/hgdev`` bootstrapped with various Python installs
     and a clone of the Mercurial repo.

     In EC2, EBS volumes launched from snapshots have wonky performance behavior.
     Notably, blocks have to be copied on first access, which makes volume
     I/O extremely slow on fresh volumes.

     Furthermore, we may want to run operations, tests, etc on alternative
     filesystems so we examine behavior on different filesystems.

     This function is used to facilitate executing operations on alternate
     volumes.
     """
     sftp = ssh_client.open_sftp()

     with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
         fh.write(PREPARE_HGDEV)
         fh.chmod(0o0777)

     command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
     chan, stdin, stdout = exec_command(ssh_client, command)
     stdin.close()

     for line in stdout:
         print(line, end='')

     res = chan.recv_exit_status()

     if res:
         raise Exception('non-0 exit code updating working directory; %d'
                         % res)


 def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
     """Synchronize a local Mercurial source path to remote EC2 instance."""

     with tempfile.TemporaryDirectory() as temp_dir:
         temp_dir = pathlib.Path(temp_dir)

         ssh_dir = temp_dir / '.ssh'
         ssh_dir.mkdir()
         ssh_dir.chmod(0o0700)

         public_ip = ec2_instance.public_ip_address

         ssh_config = ssh_dir / 'config'

         with ssh_config.open('w', encoding='utf-8') as fh:
             fh.write('Host %s\n' % public_ip)
             fh.write(' User hg\n')
             fh.write(' StrictHostKeyChecking no\n')
             fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
             fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)

         if not (source_path / '.hg').is_dir():
             raise Exception('%s is not a Mercurial repository; synchronization '
                             'not yet supported' % source_path)

         env = dict(os.environ)
         env['HGPLAIN'] = '1'
         env['HGENCODING'] = 'utf-8'

         hg_bin = source_path / 'hg'

         res = subprocess.run(
             ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
             cwd=str(source_path), env=env, check=True, capture_output=True)

         full_revision = res.stdout.decode('ascii')

         args = [
             'python2.7', str(hg_bin),
             '--config', 'ui.ssh=ssh -F %s' % ssh_config,
             '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
             # Also ensure .hgtags changes are present so auto version
             # calculation works.
             'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
             'ssh://%s//hgwork/src' % public_ip,
         ]

         res = subprocess.run(args, cwd=str(source_path), env=env)

         # Allow 1 (no-op) to not trigger error.
         if res.returncode not in (0, 1):
             res.check_returncode()

         # TODO support synchronizing dirty working directory.

         sftp = ec2_instance.ssh_client.open_sftp()

         with sftp.open('/hgdev/hgup', 'wb') as fh:
             fh.write(HG_UPDATE_CLEAN)
             fh.chmod(0o0700)

         chan, stdin, stdout = exec_command(
             ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
         stdin.close()

         for line in stdout:
             print(line, end='')

         res = chan.recv_exit_status()

         if res:
             raise Exception('non-0 exit code updating working directory; %d'
                             % res)


 def run_tests(ssh_client, python_version, test_flags=None):
     """Run tests on a remote Linux machine via an SSH client."""
     test_flags = test_flags or []

     print('running tests')

     if python_version == 'system2':
         python = '/usr/bin/python2'
     elif python_version == 'system3':
         python = '/usr/bin/python3'
     elif python_version.startswith('pypy'):
         python = '/hgdev/pyenv/shims/%s' % python_version
     else:
         python = '/hgdev/pyenv/shims/python%s' % python_version

     test_flags = ' '.join(shlex.quote(a) for a in test_flags)

     command = (
         '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
         'cd /hgwork/src/tests && %s run-tests.py %s"' % (
             python, test_flags))

     chan, stdin, stdout = exec_command(ssh_client, command)

     stdin.close()

     for line in stdout:
         print(line, end='')

     return chan.recv_exit_status()
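
For reference, a minimal usage sketch (not part of the change above) showing how these helpers might be driven end to end. It assumes the module is importable as linux within its package and that instance is an object exposing the ssh_client, public_ip_address, and ssh_private_key_path attributes that synchronize_hg() already relies on; the code that provisions such an instance lives outside this file, and run_suite is a hypothetical driver name.

    import pathlib

    from . import linux


    def run_suite(instance, source_path, revision='.', python_version='system3'):
        # Format and mount a scratch filesystem at /hgwork first; any value
        # accepted by PREPARE_HGDEV works (e.g. 'ext4', 'tmpfs', 'default').
        linux.prepare_exec_environment(instance.ssh_client, filesystem='ext4')

        # Push the requested revision into /hgwork/src on the instance and
        # update the remote working directory to it.
        linux.synchronize_hg(pathlib.Path(source_path), instance, revision)

        # Run the test harness with the chosen interpreter and return the
        # run-tests.py exit code.
        return linux.run_tests(instance.ssh_client, python_version)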