automation: install latest Python versions in Linux environment...
Gregory Szorc
r47967:743522fa default
@@ -1,609 +1,609 @@
1 # linux.py - Linux specific automation functionality
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 import os
11 import pathlib
12 import shlex
13 import subprocess
14 import tempfile
15
16 from .ssh import exec_command
17
18
19 # Linux distributions that are supported.
20 DISTROS = {
21     'debian9',
22     'debian10',
23     'ubuntu18.04',
24     'ubuntu19.04',
25 }
26
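DISTROS is the allowlist that the rest of the automation code can check a requested distribution name against. A minimal sketch of such a guard, assuming a hypothetical caller that receives the distro name as a string (the helper name is illustrative and not part of this changeset):

    def validate_distro(distro):
        # Hypothetical guard; DISTROS is the set defined above.
        if distro not in DISTROS:
            raise Exception(
                'unsupported distro: %s; must be one of %s'
                % (distro, ', '.join(sorted(DISTROS)))
            )
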
27 INSTALL_PYTHONS = r'''
28 PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
29 PYENV3_VERSIONS="3.5.10 3.6.12 3.7.9 3.8.6 3.9.0 pypy3.5-7.0.0 pypy3.6-7.3.0"
29 PYENV3_VERSIONS="3.5.10 3.6.13 3.7.10 3.8.10 3.9.5 pypy3.5-7.0.0 pypy3.6-7.3.3 pypy3.7-7.3.3"
30
31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
32 pushd /hgdev/pyenv
33 git checkout 8ac91b4fd678a8c04356f5ec85cfcd565c265e9a
33 git checkout 328fd42c3a2fbf14ae46dae2021a087fe27ba7e2
34 popd
35
36 export PYENV_ROOT="/hgdev/pyenv"
37 export PATH="$PYENV_ROOT/bin:$PATH"
38
39 # pip 19.2.3.
40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
43
44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
48
49 for v in ${PYENV2_VERSIONS}; do
50     pyenv install -v ${v}
51     ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
52     ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
53     ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
54 done
55
56 for v in ${PYENV3_VERSIONS}; do
57     pyenv install -v ${v}
58     ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
59
60     case ${v} in
61         3.5.*)
62             REQUIREMENTS=requirements-py3.5.txt
63             ;;
64         pypy3.5*)
65             REQUIREMENTS=requirements-py3.5.txt
66             ;;
67         *)
68             REQUIREMENTS=requirements-py3.txt
69             ;;
70     esac
71
72     ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/${REQUIREMENTS}
73 done
74
75 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
76 '''.lstrip().replace(
77     '\r\n', '\n'
78 )
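The script above pins get-pip.py and the virtualenv tarball to specific SHA-256 digests and verifies them with `sha256sum --check` before installing anything. For illustration only, the same verify-before-install pattern expressed in Python (a sketch; the helper name is hypothetical and not part of this file):

    import hashlib

    def verify_sha256(path, expected):
        # Compute the SHA-256 of a downloaded artifact and compare it to the
        # pinned digest, mirroring the script's `sha256sum --check` step.
        h = hashlib.sha256()
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(65536), b''):
                h.update(chunk)
        if h.hexdigest() != expected:
            raise Exception('SHA-256 mismatch for %s' % path)
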
79
80
81 INSTALL_RUST = r'''
82 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
83 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
84 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
85
86 chmod +x rustup-init
87 sudo -H -u hg -g hg ./rustup-init -y
88 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.46.0
89 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
90
91 sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.10.3 pyoxidizer
92 '''
93
94
95 BOOTSTRAP_VIRTUALENV = r'''
96 /usr/bin/virtualenv /hgdev/venv-bootstrap
97
98 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
99 HG_TARBALL=mercurial-5.1.1.tar.gz
100
101 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
102 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
103
104 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
105 '''.lstrip().replace(
106     '\r\n', '\n'
107 )
108
109
110 BOOTSTRAP_DEBIAN = (
111     r'''
112 #!/bin/bash
113
114 set -ex
115
116 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
117 DEBIAN_VERSION=`cat /etc/debian_version`
118 LSB_RELEASE=`lsb_release -cs`
119
120 sudo /usr/sbin/groupadd hg
121 sudo /usr/sbin/groupadd docker
122 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
123 sudo mkdir /home/hg/.ssh
124 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
125 sudo chown -R hg:hg /home/hg/.ssh
126 sudo chmod 700 /home/hg/.ssh
127 sudo chmod 600 /home/hg/.ssh/authorized_keys
128
129 cat << EOF | sudo tee /etc/sudoers.d/90-hg
130 hg ALL=(ALL) NOPASSWD:ALL
131 EOF
132
133 sudo apt-get update
134 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
135
136 # Install packages necessary to set up Docker Apt repo.
137 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
138     apt-transport-https \
139     gnupg
140
141 cat > docker-apt-key << EOF
142 -----BEGIN PGP PUBLIC KEY BLOCK-----
143
144 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
145 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
146 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
147 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
148 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
149 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
150 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
151 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
152 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
153 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
154 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
155 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
156 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
157 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
158 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
159 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
160 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
161 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
162 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
163 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
164 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
165 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
166 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
167 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
168 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
169 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
170 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
171 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
172 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
173 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
174 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
175 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
176 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
177 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
178 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
179 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
180 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
181 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
182 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
183 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
184 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
185 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
186 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
187 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
188 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
189 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
190 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
191 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
192 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
193 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
194 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
195 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
196 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
197 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
198 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
199 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
200 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
201 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
202 =0YYh
203 -----END PGP PUBLIC KEY BLOCK-----
204 EOF
205
206 sudo apt-key add docker-apt-key
207
208 if [ "$LSB_RELEASE" = "stretch" ]; then
209     cat << EOF | sudo tee -a /etc/apt/sources.list
210 # Need backports for clang-format-6.0
211 deb http://deb.debian.org/debian stretch-backports main
212 EOF
213 fi
214
215 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
216     cat << EOF | sudo tee -a /etc/apt/sources.list
217 # Sources are useful if we want to compile things locally.
218 deb-src http://deb.debian.org/debian $LSB_RELEASE main
219 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
220 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
221 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
222
223 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
224 EOF
225
226 elif [ "$DISTRO" = "Ubuntu" ]; then
227     cat << EOF | sudo tee -a /etc/apt/sources.list
228 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
229 EOF
230
231 fi
232
233 sudo apt-get update
234
235 PACKAGES="\
236     awscli \
237     btrfs-progs \
238     build-essential \
239     bzr \
240     clang-format-6.0 \
241     cvs \
242     darcs \
243     debhelper \
244     devscripts \
245     docker-ce \
246     dpkg-dev \
247     dstat \
248     emacs \
249     gettext \
250     git \
251     htop \
252     iotop \
253     jfsutils \
254     libbz2-dev \
255     libexpat1-dev \
256     libffi-dev \
257     libgdbm-dev \
258     liblzma-dev \
259     libncurses5-dev \
260     libnss3-dev \
261     libreadline-dev \
262     libsqlite3-dev \
263     libssl-dev \
264     netbase \
265     ntfs-3g \
266     nvme-cli \
267     pyflakes \
268     pyflakes3 \
269     pylint \
270     pylint3 \
271     python-all-dev \
272     python-dev \
273     python-docutils \
274     python-fuzzywuzzy \
275     python-pygments \
276     python-subversion \
277     python-vcr \
278     python3-boto3 \
279     python3-dev \
280     python3-docutils \
281     python3-fuzzywuzzy \
282     python3-pygments \
283     python3-vcr \
284     python3-venv \
285     rsync \
286     sqlite3 \
287     subversion \
288     tcl-dev \
289     tk-dev \
290     tla \
291     unzip \
292     uuid-dev \
293     vim \
294     virtualenv \
295     wget \
296     xfsprogs \
297     zip \
298     zlib1g-dev"
299
300 if [ "LSB_RELEASE" = "stretch" ]; then
300 if [ "LSB_RELEASE" = "stretch" ]; then
301     PACKAGES="$PACKAGES linux-perf"
302 elif [ "$DISTRO" = "Ubuntu" ]; then
303     PACKAGES="$PACKAGES linux-tools-common"
304 fi
305
306 # Monotone only available in older releases.
307 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
308     PACKAGES="$PACKAGES monotone"
309 fi
310
311 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
312
313 # Create clang-format symlink so test harness finds it.
314 sudo update-alternatives --install /usr/bin/clang-format clang-format \
315     /usr/bin/clang-format-6.0 1000
316
317 sudo mkdir /hgdev
318 # Will be normalized to hg:hg later.
319 sudo chown `whoami` /hgdev
320
321 {install_rust}
322
323 cp requirements-*.txt /hgdev/
324
325 # Disable the pip version check because it uses the network and can
326 # be annoying.
327 cat << EOF | sudo tee -a /etc/pip.conf
328 [global]
329 disable-pip-version-check = True
330 EOF
331
332 {install_pythons}
333 {bootstrap_virtualenv}
334
335 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
336
337 # Mark the repo as non-publishing.
338 cat >> /hgdev/src/.hg/hgrc << EOF
339 [phases]
340 publish = false
341 EOF
342
343 sudo chown -R hg:hg /hgdev
344 '''.lstrip()
345     .format(
346         install_rust=INSTALL_RUST,
347         install_pythons=INSTALL_PYTHONS,
348         bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
349     )
350     .replace('\r\n', '\n')
351 )
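BOOTSTRAP_DEBIAN is assembled by interpolating the INSTALL_RUST, INSTALL_PYTHONS, and BOOTSTRAP_VIRTUALENV fragments into one shell script. Elsewhere in the automation code it is presumably uploaded to the instance and executed over SSH, much like the smaller helper scripts later in this file. A minimal sketch of that upload-and-run pattern, reusing the same sftp/exec_command calls (the function name and remote path are illustrative, not part of this changeset):

    def run_bootstrap_script(ssh_client, script, remote_path='/tmp/bootstrap.sh'):
        # Upload a generated shell script and execute it, streaming output,
        # mirroring how PREPARE_HGDEV and HG_UPDATE_CLEAN are run below.
        sftp = ssh_client.open_sftp()
        with sftp.open(remote_path, 'wb') as fh:
            fh.write(script)
            fh.chmod(0o0700)

        chan, stdin, stdout = exec_command(ssh_client, remote_path)
        stdin.close()

        for line in stdout:
            print(line, end='')

        if chan.recv_exit_status():
            raise Exception('bootstrap script failed')
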
352
353
354 # Prepares /hgdev for operations.
355 PREPARE_HGDEV = '''
356 #!/bin/bash
357
358 set -e
359
360 FS=$1
361
362 ensure_device() {
363     if [ -z "${DEVICE}" ]; then
364         echo "could not find block device to format"
365         exit 1
366     fi
367 }
368
369 # Determine device to partition for extra filesystem.
370 # If only 1 volume is present, it will be the root volume and
371 # should be /dev/nvme0. If multiple volumes are present, the
372 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
373 # a partition.
374 if [ -e /dev/nvme1n1 ]; then
375     if [ -e /dev/nvme0n1p1 ]; then
376         DEVICE=/dev/nvme1n1
377     else
378         DEVICE=/dev/nvme0n1
379     fi
380 else
381     DEVICE=
382 fi
383
384 sudo mkdir /hgwork
385
386 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
387     ensure_device
388     echo "creating ${FS} filesystem on ${DEVICE}"
389 fi
390
391 if [ "${FS}" = "default" ]; then
392     :
393
394 elif [ "${FS}" = "btrfs" ]; then
395     sudo mkfs.btrfs ${DEVICE}
396     sudo mount ${DEVICE} /hgwork
397
398 elif [ "${FS}" = "ext3" ]; then
399     # lazy_journal_init speeds up filesystem creation at the expense of
400     # integrity if things crash. We are an ephemeral instance, so we don't
401     # care about integrity.
402     sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
403     sudo mount ${DEVICE} /hgwork
404
405 elif [ "${FS}" = "ext4" ]; then
406     sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
407     sudo mount ${DEVICE} /hgwork
408
409 elif [ "${FS}" = "jfs" ]; then
410     sudo mkfs.jfs ${DEVICE}
411     sudo mount ${DEVICE} /hgwork
412
413 elif [ "${FS}" = "tmpfs" ]; then
414     echo "creating tmpfs volume in /hgwork"
415     sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
416
417 elif [ "${FS}" = "xfs" ]; then
418     sudo mkfs.xfs ${DEVICE}
419     sudo mount ${DEVICE} /hgwork
420
421 else
422     echo "unsupported filesystem: ${FS}"
423     exit 1
424 fi
425
426 echo "/hgwork ready"
427
428 sudo chown hg:hg /hgwork
429 mkdir /hgwork/tmp
430 chown hg:hg /hgwork/tmp
431
432 rsync -a /hgdev/src /hgwork/
433 '''.lstrip().replace(
434     '\r\n', '\n'
435 )
436
437
438 HG_UPDATE_CLEAN = '''
439 set -ex
440
441 HG=/hgdev/venv-bootstrap/bin/hg
442
443 cd /hgwork/src
444 ${HG} --config extensions.purge= purge --all
445 ${HG} update -C $1
446 ${HG} log -r .
447 '''.lstrip().replace(
448     '\r\n', '\n'
449 )
450
451
452 def prepare_exec_environment(ssh_client, filesystem='default'):
453     """Prepare an EC2 instance to execute things.
454
455     The AMI has an ``/hgdev`` bootstrapped with various Python installs
456     and a clone of the Mercurial repo.
457
458     In EC2, EBS volumes launched from snapshots have wonky performance behavior.
459     Notably, blocks have to be copied on first access, which makes volume
460     I/O extremely slow on fresh volumes.
461
462     Furthermore, we may want to run operations, tests, etc on alternative
463     filesystems so we examine behavior on different filesystems.
464
465     This function is used to facilitate executing operations on alternate
466     volumes.
467     """
468     sftp = ssh_client.open_sftp()
469
470     with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
471         fh.write(PREPARE_HGDEV)
472         fh.chmod(0o0777)
473
474     command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
475     chan, stdin, stdout = exec_command(ssh_client, command)
476     stdin.close()
477
478     for line in stdout:
479         print(line, end='')
480
481     res = chan.recv_exit_status()
482
483     if res:
484         raise Exception('non-0 exit code updating working directory; %d' % res)
485
486
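For illustration, a caller that wants the working area on a freshly formatted filesystem would invoke the function along these lines (a hedged sketch; `instance` stands for whatever object in this automation layer exposes a paramiko SSH client):

    # Format the spare NVMe volume as xfs and mount it at /hgwork before
    # running anything I/O heavy; 'default' would skip the extra filesystem.
    prepare_exec_environment(instance.ssh_client, filesystem='xfs')
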
487 def synchronize_hg(
488     source_path: pathlib.Path, ec2_instance, revision: str = None
489 ):
490     """Synchronize a local Mercurial source path to remote EC2 instance."""
491
492     with tempfile.TemporaryDirectory() as temp_dir:
493         temp_dir = pathlib.Path(temp_dir)
494
495         ssh_dir = temp_dir / '.ssh'
496         ssh_dir.mkdir()
497         ssh_dir.chmod(0o0700)
498
499         public_ip = ec2_instance.public_ip_address
500
501         ssh_config = ssh_dir / 'config'
502
503         with ssh_config.open('w', encoding='utf-8') as fh:
504             fh.write('Host %s\n' % public_ip)
505             fh.write(' User hg\n')
506             fh.write(' StrictHostKeyChecking no\n')
507             fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
508             fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
509
510         if not (source_path / '.hg').is_dir():
511             raise Exception(
512                 '%s is not a Mercurial repository; synchronization '
513                 'not yet supported' % source_path
514             )
515
516         env = dict(os.environ)
517         env['HGPLAIN'] = '1'
518         env['HGENCODING'] = 'utf-8'
519
520         hg_bin = source_path / 'hg'
521
522         res = subprocess.run(
523             ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
524             cwd=str(source_path),
525             env=env,
526             check=True,
527             capture_output=True,
528         )
529
530         full_revision = res.stdout.decode('ascii')
531
532         args = [
533             'python2.7',
534             str(hg_bin),
535             '--config',
536             'ui.ssh=ssh -F %s' % ssh_config,
537             '--config',
538             'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
539             # Also ensure .hgtags changes are present so auto version
540             # calculation works.
541             'push',
542             '-f',
543             '-r',
544             full_revision,
545             '-r',
546             'file(.hgtags)',
547             'ssh://%s//hgwork/src' % public_ip,
548         ]
549
550         res = subprocess.run(args, cwd=str(source_path), env=env)
551
552         # Allow 1 (no-op) to not trigger error.
553         if res.returncode not in (0, 1):
554             res.check_returncode()
555
556         # TODO support synchronizing dirty working directory.
557
558         sftp = ec2_instance.ssh_client.open_sftp()
559
560         with sftp.open('/hgdev/hgup', 'wb') as fh:
561             fh.write(HG_UPDATE_CLEAN)
562             fh.chmod(0o0700)
563
564         chan, stdin, stdout = exec_command(
565             ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
566         )
567         stdin.close()
568
569         for line in stdout:
570             print(line, end='')
571
572         res = chan.recv_exit_status()
573
574         if res:
575             raise Exception(
576                 'non-0 exit code updating working directory; %d' % res
577             )
578
579
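A hedged usage sketch for synchronize_hg (the ec2_instance attributes it relies on, public_ip_address, ssh_private_key_path, and ssh_client, are assumed to be provided by the AWS layer of this package; the local path is illustrative):

    # Push the revision currently checked out locally ('.') to /hgwork/src on
    # the instance and update the remote working directory to it.
    synchronize_hg(pathlib.Path('~/src/hg').expanduser(), instance, revision='.')
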
580 def run_tests(ssh_client, python_version, test_flags=None):
581     """Run tests on a remote Linux machine via an SSH client."""
582     test_flags = test_flags or []
583
584     print('running tests')
585
586     if python_version == 'system2':
587         python = '/usr/bin/python2'
588     elif python_version == 'system3':
589         python = '/usr/bin/python3'
590     elif python_version.startswith('pypy'):
591         python = '/hgdev/pyenv/shims/%s' % python_version
592     else:
593         python = '/hgdev/pyenv/shims/python%s' % python_version
594
595     test_flags = ' '.join(shlex.quote(a) for a in test_flags)
596
597     command = (
598         '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
599         'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
600     )
601
602     chan, stdin, stdout = exec_command(ssh_client, command)
603
604     stdin.close()
605
606     for line in stdout:
607         print(line, end='')
608
609     return chan.recv_exit_status()
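Finally, a usage sketch for run_tests. The version string selects either a system interpreter ('system2'/'system3') or a pyenv shim installed by INSTALL_PYTHONS above; the flag values shown are illustrative and are shell-quoted by the function before being passed to run-tests.py:

    # Run the test harness under CPython 3.9 and propagate its exit code.
    rc = run_tests(instance.ssh_client, '3.9', test_flags=['--jobs', '2'])
    if rc:
        raise Exception('tests failed with exit code %d' % rc)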