##// END OF EJS Templates
automation: install Python 2.7.17, 3.7.5, and PyPy 7.2.0...
Gregory Szorc -
r43620:388ada1c stable
parent child Browse files
Show More
@@ -1,594 +1,594 b''
1 # linux.py - Linux specific automation functionality
1 # linux.py - Linux specific automation functionality
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import os
10 import os
11 import pathlib
11 import pathlib
12 import shlex
12 import shlex
13 import subprocess
13 import subprocess
14 import tempfile
14 import tempfile
15
15
16 from .ssh import exec_command
16 from .ssh import exec_command
17
17
18
18
# Linux distributions that are supported.
# (Reconstructed from a duplicated side-by-side diff rendering; each line
# previously appeared twice with embedded diff line numbers.)
DISTROS = {
    'debian9',
    'debian10',
    'ubuntu18.04',
    'ubuntu19.04',
}
26
26
# Shell script fragment that installs the CPython and PyPy versions used for
# testing via pyenv, plus pip/virtualenv and the pinned requirements files.
# Interpolated into BOOTSTRAP_DEBIAN via str.format(). The trailing
# .replace('\r\n', '\n') guards against CRLF line endings sneaking into the
# uploaded script (which would break bash).
INSTALL_PYTHONS = r'''
PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
PYENV3_VERSIONS="3.5.7 3.6.9 3.7.5 3.8.0 pypy3.5-7.0.0 pypy3.6-7.2.0"

git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
pushd /hgdev/pyenv
git checkout 0e7cfc3b3d4eca46ad83d632e1505f5932cd179b
popd

export PYENV_ROOT="/hgdev/pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"

# pip 19.2.3.
PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
echo "${PIP_SHA256} get-pip.py" | sha256sum --check -

VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -

for v in ${PYENV2_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
done

for v in ${PYENV3_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
done

pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
'''.lstrip().replace(
    '\r\n', '\n'
)
66
66
67
67
# Shell script fragment that installs rustup (pinned by SHA-256) and the Rust
# toolchains used for building/testing, running as the 'hg' user.
# Interpolated into BOOTSTRAP_DEBIAN via str.format().
INSTALL_RUST = r'''
RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -

chmod +x rustup-init
sudo -H -u hg -g hg ./rustup-init -y
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
'''
78
78
79
79
# Shell script fragment that creates /hgdev/venv-bootstrap and installs a
# pinned Mercurial release (verified by SHA-256) into it. This bootstrap hg
# is what later steps use to clone/update the real source repo.
BOOTSTRAP_VIRTUALENV = r'''
/usr/bin/virtualenv /hgdev/venv-bootstrap

HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
HG_TARBALL=mercurial-5.1.1.tar.gz

wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -

/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
'''.lstrip().replace(
    '\r\n', '\n'
)
93
93
94
94
# Bootstrap script for Debian/Ubuntu EC2 instances: creates the 'hg' user,
# configures Apt (Docker repo, backports, deb-src), installs the package set
# needed for development/testing, then runs the Rust/Python/virtualenv
# installers and clones the Mercurial repo into /hgdev/src.
#
# Literal shell braces are doubled ({{...}}) because this string goes through
# str.format() below.
#
# Fix from review: the stretch linux-perf check previously compared the
# literal string "LSB_RELEASE" (missing '$'), so linux-perf was never
# installed on stretch; the variable is now expanded.
BOOTSTRAP_DEBIAN = (
    r'''
#!/bin/bash

set -ex

DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
DEBIAN_VERSION=`cat /etc/debian_version`
LSB_RELEASE=`lsb_release -cs`

sudo /usr/sbin/groupadd hg
sudo /usr/sbin/groupadd docker
sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
sudo mkdir /home/hg/.ssh
sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
sudo chown -R hg:hg /home/hg/.ssh
sudo chmod 700 /home/hg/.ssh
sudo chmod 600 /home/hg/.ssh/authorized_keys

cat << EOF | sudo tee /etc/sudoers.d/90-hg
hg ALL=(ALL) NOPASSWD:ALL
EOF

sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade

# Install packages necessary to set up Docker Apt repo.
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
    apt-transport-https \
    gnupg

cat > docker-apt-key << EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
=0YYh
-----END PGP PUBLIC KEY BLOCK-----
EOF

sudo apt-key add docker-apt-key

if [ "$LSB_RELEASE" = "stretch" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
# Need backports for clang-format-6.0
deb http://deb.debian.org/debian stretch-backports main
EOF
fi

if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
# Sources are useful if we want to compile things locally.
deb-src http://deb.debian.org/debian $LSB_RELEASE main
deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main

deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
EOF

elif [ "$DISTRO" = "Ubuntu" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
EOF

fi

sudo apt-get update

PACKAGES="\
    awscli \
    btrfs-progs \
    build-essential \
    bzr \
    clang-format-6.0 \
    cvs \
    darcs \
    debhelper \
    devscripts \
    docker-ce \
    dpkg-dev \
    dstat \
    emacs \
    gettext \
    git \
    htop \
    iotop \
    jfsutils \
    libbz2-dev \
    libexpat1-dev \
    libffi-dev \
    libgdbm-dev \
    liblzma-dev \
    libncurses5-dev \
    libnss3-dev \
    libreadline-dev \
    libsqlite3-dev \
    libssl-dev \
    netbase \
    ntfs-3g \
    nvme-cli \
    pyflakes \
    pyflakes3 \
    pylint \
    pylint3 \
    python-all-dev \
    python-dev \
    python-docutils \
    python-fuzzywuzzy \
    python-pygments \
    python-subversion \
    python-vcr \
    python3-boto3 \
    python3-dev \
    python3-docutils \
    python3-fuzzywuzzy \
    python3-pygments \
    python3-vcr \
    rsync \
    sqlite3 \
    subversion \
    tcl-dev \
    tk-dev \
    tla \
    unzip \
    uuid-dev \
    vim \
    virtualenv \
    wget \
    xfsprogs \
    zip \
    zlib1g-dev"

# BUGFIX: was `[ "LSB_RELEASE" = "stretch" ]` (literal string, always false).
if [ "$LSB_RELEASE" = "stretch" ]; then
    PACKAGES="$PACKAGES linux-perf"
elif [ "$DISTRO" = "Ubuntu" ]; then
    PACKAGES="$PACKAGES linux-tools-common"
fi

# Monotone only available in older releases.
if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
    PACKAGES="$PACKAGES monotone"
fi

sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES

# Create clang-format symlink so test harness finds it.
sudo update-alternatives --install /usr/bin/clang-format clang-format \
    /usr/bin/clang-format-6.0 1000

sudo mkdir /hgdev
# Will be normalized to hg:hg later.
sudo chown `whoami` /hgdev

{install_rust}

cp requirements-py2.txt /hgdev/requirements-py2.txt
cp requirements-py3.txt /hgdev/requirements-py3.txt

# Disable the pip version check because it uses the network and can
# be annoying.
cat << EOF | sudo tee -a /etc/pip.conf
[global]
disable-pip-version-check = True
EOF

{install_pythons}
{bootstrap_virtualenv}

/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src

# Mark the repo as non-publishing.
cat >> /hgdev/src/.hg/hgrc << EOF
[phases]
publish = false
EOF

sudo chown -R hg:hg /hgdev
'''.lstrip()
    .format(
        install_rust=INSTALL_RUST,
        install_pythons=INSTALL_PYTHONS,
        bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
    )
    .replace('\r\n', '\n')
)
337
337
338
338
# Prepares /hgdev for operations.
#
# Uploaded to the instance and run (by prepare_exec_environment) as
# `prepare-hgdev <filesystem>`: optionally formats/mounts a scratch block
# device or tmpfs at /hgwork, then copies the source clone there.
PREPARE_HGDEV = '''
#!/bin/bash

set -e

FS=$1

ensure_device() {
    if [ -z "${DEVICE}" ]; then
        echo "could not find block device to format"
        exit 1
    fi
}

# Determine device to partition for extra filesystem.
# If only 1 volume is present, it will be the root volume and
# should be /dev/nvme0. If multiple volumes are present, the
# root volume could be nvme0 or nvme1. Use whichever one doesn't have
# a partition.
if [ -e /dev/nvme1n1 ]; then
    if [ -e /dev/nvme0n1p1 ]; then
        DEVICE=/dev/nvme1n1
    else
        DEVICE=/dev/nvme0n1
    fi
else
    DEVICE=
fi

sudo mkdir /hgwork

if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
    ensure_device
    echo "creating ${FS} filesystem on ${DEVICE}"
fi

if [ "${FS}" = "default" ]; then
    :

elif [ "${FS}" = "btrfs" ]; then
    sudo mkfs.btrfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext3" ]; then
    # lazy_journal_init speeds up filesystem creation at the expense of
    # integrity if things crash. We are an ephemeral instance, so we don't
    # care about integrity.
    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext4" ]; then
    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "jfs" ]; then
    sudo mkfs.jfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "tmpfs" ]; then
    echo "creating tmpfs volume in /hgwork"
    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork

elif [ "${FS}" = "xfs" ]; then
    sudo mkfs.xfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

else
    echo "unsupported filesystem: ${FS}"
    exit 1
fi

echo "/hgwork ready"

sudo chown hg:hg /hgwork
mkdir /hgwork/tmp
chown hg:hg /hgwork/tmp

rsync -a /hgdev/src /hgwork/
'''.lstrip().replace(
    '\r\n', '\n'
)
421
421
422
422
# Shell script run on the instance to purge and update the /hgwork/src
# working directory to a given revision ($1) using the bootstrap hg.
HG_UPDATE_CLEAN = '''
set -ex

HG=/hgdev/venv-bootstrap/bin/hg

cd /hgwork/src
${HG} --config extensions.purge= purge --all
${HG} update -C $1
${HG} log -r .
'''.lstrip().replace(
    '\r\n', '\n'
)
435
435
436
436
def prepare_exec_environment(ssh_client, filesystem='default'):
    """Prepare an EC2 instance to execute things.

    The AMI has an ``/hgdev`` bootstrapped with various Python installs
    and a clone of the Mercurial repo.

    In EC2, EBS volumes launched from snapshots have wonky performance behavior.
    Notably, blocks have to be copied on first access, which makes volume
    I/O extremely slow on fresh volumes.

    Furthermore, we may want to run operations, tests, etc on alternative
    filesystems so we examine behavior on different filesystems.

    This function is used to facilitate executing operations on alternate
    volumes.

    ``ssh_client`` is a connected SSH client; ``filesystem`` is the filesystem
    name passed through to the uploaded ``prepare-hgdev`` script.

    Raises ``Exception`` if the remote script exits non-zero.
    """
    # Upload the PREPARE_HGDEV script and make it executable by everyone.
    sftp = ssh_client.open_sftp()

    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
        fh.write(PREPARE_HGDEV)
        fh.chmod(0o0777)

    command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
    chan, stdin, stdout = exec_command(ssh_client, command)
    stdin.close()

    # Stream remote output to our stdout as it arrives.
    for line in stdout:
        print(line, end='')

    res = chan.recv_exit_status()

    if res:
        # NOTE(review): message says "updating working directory" but this
        # function prepares the exec environment — looks copy-pasted from
        # run_tests-style code; kept as-is to preserve behavior.
        raise Exception('non-0 exit code updating working directory; %d' % res)
470
470
471
471
def synchronize_hg(
    source_path: pathlib.Path, ec2_instance, revision: str = None
):
    """Synchronize a local Mercurial source path to remote EC2 instance.

    ``source_path`` is a local Mercurial clone to push from.
    ``ec2_instance`` is the instance wrapper (provides
    ``public_ip_address``, ``ssh_private_key_path``, and an open
    ``ssh_client``). ``revision`` selects the changeset to push and update
    to; when ``None`` the working directory parent (``.``) is used.

    Raises ``Exception`` if ``source_path`` is not a Mercurial repository
    or if the remote working directory update exits non-zero.
    """
    # Fix: previously the declared default of None was passed straight
    # into the subprocess argv below ('-r', None), which raises TypeError.
    # Resolve it to the working directory parent instead.
    if revision is None:
        revision = '.'

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = pathlib.Path(temp_dir)

        # Write a throwaway SSH config so `hg push` can reach the instance
        # without touching the user's ~/.ssh state.
        ssh_dir = temp_dir / '.ssh'
        ssh_dir.mkdir()
        ssh_dir.chmod(0o0700)

        public_ip = ec2_instance.public_ip_address

        ssh_config = ssh_dir / 'config'

        with ssh_config.open('w', encoding='utf-8') as fh:
            fh.write('Host %s\n' % public_ip)
            fh.write('  User hg\n')
            fh.write('  StrictHostKeyChecking no\n')
            fh.write('  UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
            fh.write('  IdentityFile %s\n' % ec2_instance.ssh_private_key_path)

        if not (source_path / '.hg').is_dir():
            raise Exception(
                '%s is not a Mercurial repository; synchronization '
                'not yet supported' % source_path
            )

        env = dict(os.environ)
        env['HGPLAIN'] = '1'
        env['HGENCODING'] = 'utf-8'

        hg_bin = source_path / 'hg'

        # Resolve the (possibly symbolic) revision to a full 40 character
        # node so the remote update below is unambiguous.
        res = subprocess.run(
            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
            cwd=str(source_path),
            env=env,
            check=True,
            capture_output=True,
        )

        full_revision = res.stdout.decode('ascii')

        args = [
            'python2.7',
            str(hg_bin),
            '--config',
            'ui.ssh=ssh -F %s' % ssh_config,
            '--config',
            'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
            # Also ensure .hgtags changes are present so auto version
            # calculation works.
            'push',
            '-f',
            '-r',
            full_revision,
            '-r',
            'file(.hgtags)',
            'ssh://%s//hgwork/src' % public_ip,
        ]

        res = subprocess.run(args, cwd=str(source_path), env=env)

        # Allow 1 (no-op) to not trigger error.
        if res.returncode not in (0, 1):
            res.check_returncode()

        # TODO support synchronizing dirty working directory.

        # Upload the update-clean helper script and run it so the remote
        # working directory matches the pushed revision.
        sftp = ec2_instance.ssh_client.open_sftp()

        with sftp.open('/hgdev/hgup', 'wb') as fh:
            fh.write(HG_UPDATE_CLEAN)
            fh.chmod(0o0700)

        chan, stdin, stdout = exec_command(
            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
        )
        stdin.close()

        # Stream remote output as it arrives.
        for line in stdout:
            print(line, end='')

        res = chan.recv_exit_status()

        if res:
            raise Exception(
                'non-0 exit code updating working directory; %d' % res
            )
563
563
564
564
def run_tests(ssh_client, python_version, test_flags=None):
    """Run tests on a remote Linux machine via an SSH client."""
    flags = list(test_flags) if test_flags else []

    print('running tests')

    # Resolve the requested interpreter name to an absolute path on the
    # remote machine. The "system" interpreters live in /usr/bin;
    # everything else is exposed through pyenv shims.
    system_interpreters = {
        'system2': '/usr/bin/python2',
        'system3': '/usr/bin/python3',
    }

    if python_version in system_interpreters:
        python = system_interpreters[python_version]
    elif python_version.startswith('pypy'):
        python = '/hgdev/pyenv/shims/%s' % python_version
    else:
        python = '/hgdev/pyenv/shims/python%s' % python_version

    # Shell-quote each flag individually before joining into the command
    # string that the remote /bin/sh will parse.
    quoted_flags = ' '.join(shlex.quote(flag) for flag in flags)

    command = (
        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
        'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, quoted_flags)
    )

    chan, stdin, stdout = exec_command(ssh_client, command)

    stdin.close()

    # Relay remote test output to the local console as it streams in.
    for line in stdout:
        print(line, end='')

    return chan.recv_exit_status()
General Comments 0
You need to be logged in to leave comments. Login now