automation: install latest Python versions...
Gregory Szorc
r42922:6f7262fe default
@@ -1,551 +1,551 @@
# linux.py - Linux specific automation functionality
#
# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# no-check-code because Python 3 native.

import os
import pathlib
import shlex
import subprocess
import tempfile

from .ssh import (
    exec_command,
)


# Linux distributions that are supported.
DISTROS = {
    'debian9',
    'ubuntu18.04',
    'ubuntu18.10',
    'ubuntu19.04',
}

INSTALL_PYTHONS = r'''
PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
-PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
+PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"

git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
pushd /hgdev/pyenv
-git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
+git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
popd

export PYENV_ROOT="/hgdev/pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"

# pip 19.0.3.
PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
echo "${PIP_SHA256} get-pip.py" | sha256sum --check -

VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -

for v in ${PYENV2_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
done

for v in ${PYENV3_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
done

pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
'''.lstrip().replace('\r\n', '\n')


BOOTSTRAP_VIRTUALENV = r'''
/usr/bin/virtualenv /hgdev/venv-bootstrap

HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
HG_TARBALL=mercurial-4.9.1.tar.gz

wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -

/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
'''.lstrip().replace('\r\n', '\n')


BOOTSTRAP_DEBIAN = r'''
#!/bin/bash

set -ex

DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
DEBIAN_VERSION=`cat /etc/debian_version`
LSB_RELEASE=`lsb_release -cs`

sudo /usr/sbin/groupadd hg
sudo /usr/sbin/groupadd docker
sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
sudo mkdir /home/hg/.ssh
sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
sudo chown -R hg:hg /home/hg/.ssh
sudo chmod 700 /home/hg/.ssh
sudo chmod 600 /home/hg/.ssh/authorized_keys

cat << EOF | sudo tee /etc/sudoers.d/90-hg
hg ALL=(ALL) NOPASSWD:ALL
EOF

sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade

# Install packages necessary to set up Docker Apt repo.
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
    apt-transport-https \
    gnupg

cat > docker-apt-key << EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
=0YYh
-----END PGP PUBLIC KEY BLOCK-----
EOF

sudo apt-key add docker-apt-key

if [ "$DEBIAN_VERSION" = "9.8" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
# Need backports for clang-format-6.0
deb http://deb.debian.org/debian stretch-backports main

# Sources are useful if we want to compile things locally.
deb-src http://deb.debian.org/debian stretch main
deb-src http://security.debian.org/debian-security stretch/updates main
deb-src http://deb.debian.org/debian stretch-updates main
deb-src http://deb.debian.org/debian stretch-backports main

deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
EOF

elif [ "$DISTRO" = "Ubuntu" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
EOF

fi

sudo apt-get update

PACKAGES="\
    btrfs-progs \
    build-essential \
    bzr \
    clang-format-6.0 \
    cvs \
    darcs \
    debhelper \
    devscripts \
    dpkg-dev \
    dstat \
    emacs \
    gettext \
    git \
    htop \
    iotop \
    jfsutils \
    libbz2-dev \
    libexpat1-dev \
    libffi-dev \
    libgdbm-dev \
    liblzma-dev \
    libncurses5-dev \
    libnss3-dev \
    libreadline-dev \
    libsqlite3-dev \
    libssl-dev \
    netbase \
    ntfs-3g \
    nvme-cli \
    pyflakes \
    pyflakes3 \
    pylint \
    pylint3 \
    python-all-dev \
    python-dev \
    python-docutils \
    python-fuzzywuzzy \
    python-pygments \
    python-subversion \
    python-vcr \
    python3-dev \
    python3-docutils \
    python3-fuzzywuzzy \
    python3-pygments \
    python3-vcr \
    rsync \
    sqlite3 \
    subversion \
    tcl-dev \
    tk-dev \
    tla \
    unzip \
    uuid-dev \
    vim \
    virtualenv \
    wget \
    xfsprogs \
    zip \
    zlib1g-dev"

if [ "$DEBIAN_VERSION" = "9.8" ]; then
    PACKAGES="$PACKAGES linux-perf"
elif [ "$DISTRO" = "Ubuntu" ]; then
    PACKAGES="$PACKAGES linux-tools-common"
fi

# Ubuntu 19.04 removes monotone.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES monotone"
fi

# As of April 27, 2019, Docker hasn't published packages for
# Ubuntu 19.04 yet.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES docker-ce"
fi

sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES

# Create clang-format symlink so test harness finds it.
sudo update-alternatives --install /usr/bin/clang-format clang-format \
    /usr/bin/clang-format-6.0 1000

sudo mkdir /hgdev
# Will be normalized to hg:hg later.
sudo chown `whoami` /hgdev

cp requirements-py2.txt /hgdev/requirements-py2.txt
cp requirements-py3.txt /hgdev/requirements-py3.txt

# Disable the pip version check because it uses the network and can
# be annoying.
cat << EOF | sudo tee -a /etc/pip.conf
[global]
disable-pip-version-check = True
EOF

{install_pythons}
{bootstrap_virtualenv}

/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src

# Mark the repo as non-publishing.
cat >> /hgdev/src/.hg/hgrc << EOF
[phases]
publish = false
EOF

sudo chown -R hg:hg /hgdev
'''.lstrip().format(
    install_pythons=INSTALL_PYTHONS,
    bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
).replace('\r\n', '\n')


# Prepares /hgdev for operations.
PREPARE_HGDEV = '''
#!/bin/bash

set -e

FS=$1

ensure_device() {
    if [ -z "${DEVICE}" ]; then
        echo "could not find block device to format"
        exit 1
    fi
}

# Determine device to partition for extra filesystem.
# If only 1 volume is present, it will be the root volume and
# should be /dev/nvme0. If multiple volumes are present, the
# root volume could be nvme0 or nvme1. Use whichever one doesn't have
# a partition.
if [ -e /dev/nvme1n1 ]; then
    if [ -e /dev/nvme0n1p1 ]; then
        DEVICE=/dev/nvme1n1
    else
        DEVICE=/dev/nvme0n1
    fi
else
    DEVICE=
fi

sudo mkdir /hgwork

if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
    ensure_device
    echo "creating ${FS} filesystem on ${DEVICE}"
fi

if [ "${FS}" = "default" ]; then
    :

elif [ "${FS}" = "btrfs" ]; then
    sudo mkfs.btrfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext3" ]; then
    # lazy_journal_init speeds up filesystem creation at the expense of
    # integrity if things crash. We are an ephemeral instance, so we don't
    # care about integrity.
    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext4" ]; then
    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "jfs" ]; then
    sudo mkfs.jfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "tmpfs" ]; then
    echo "creating tmpfs volume in /hgwork"
    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork

elif [ "${FS}" = "xfs" ]; then
    sudo mkfs.xfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

else
    echo "unsupported filesystem: ${FS}"
    exit 1
fi

echo "/hgwork ready"

sudo chown hg:hg /hgwork
mkdir /hgwork/tmp
chown hg:hg /hgwork/tmp

rsync -a /hgdev/src /hgwork/
'''.lstrip().replace('\r\n', '\n')


HG_UPDATE_CLEAN = '''
set -ex

HG=/hgdev/venv-bootstrap/bin/hg

cd /hgwork/src
${HG} --config extensions.purge= purge --all
${HG} update -C $1
${HG} log -r .
'''.lstrip().replace('\r\n', '\n')


def prepare_exec_environment(ssh_client, filesystem='default'):
    """Prepare an EC2 instance to execute things.

    The AMI has an ``/hgdev`` bootstrapped with various Python installs
    and a clone of the Mercurial repo.

    In EC2, EBS volumes launched from snapshots have wonky performance behavior.
    Notably, blocks have to be copied on first access, which makes volume
    I/O extremely slow on fresh volumes.

    Furthermore, we may want to run operations, tests, etc on alternative
    filesystems so we examine behavior on different filesystems.

    This function is used to facilitate executing operations on alternate
    volumes.
    """
    sftp = ssh_client.open_sftp()

    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
        fh.write(PREPARE_HGDEV)
        fh.chmod(0o0777)

    command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
    chan, stdin, stdout = exec_command(ssh_client, command)
    stdin.close()

    for line in stdout:
        print(line, end='')

    res = chan.recv_exit_status()

    if res:
        raise Exception('non-0 exit code updating working directory; %d'
                        % res)


def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
    """Synchronize a local Mercurial source path to remote EC2 instance."""

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = pathlib.Path(temp_dir)

        ssh_dir = temp_dir / '.ssh'
        ssh_dir.mkdir()
        ssh_dir.chmod(0o0700)

        public_ip = ec2_instance.public_ip_address

        ssh_config = ssh_dir / 'config'

        with ssh_config.open('w', encoding='utf-8') as fh:
            fh.write('Host %s\n' % public_ip)
            fh.write(' User hg\n')
            fh.write(' StrictHostKeyChecking no\n')
            fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
            fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)

        if not (source_path / '.hg').is_dir():
            raise Exception('%s is not a Mercurial repository; synchronization '
                            'not yet supported' % source_path)

        env = dict(os.environ)
        env['HGPLAIN'] = '1'
        env['HGENCODING'] = 'utf-8'

        hg_bin = source_path / 'hg'

        res = subprocess.run(
            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
            cwd=str(source_path), env=env, check=True, capture_output=True)

        full_revision = res.stdout.decode('ascii')

        args = [
            'python2.7', str(hg_bin),
            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
            # Also ensure .hgtags changes are present so auto version
            # calculation works.
            'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
            'ssh://%s//hgwork/src' % public_ip,
        ]

        res = subprocess.run(args, cwd=str(source_path), env=env)

        # Allow 1 (no-op) to not trigger error.
        if res.returncode not in (0, 1):
            res.check_returncode()

        # TODO support synchronizing dirty working directory.

        sftp = ec2_instance.ssh_client.open_sftp()

        with sftp.open('/hgdev/hgup', 'wb') as fh:
            fh.write(HG_UPDATE_CLEAN)
            fh.chmod(0o0700)

        chan, stdin, stdout = exec_command(
            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
        stdin.close()

        for line in stdout:
            print(line, end='')

        res = chan.recv_exit_status()

        if res:
            raise Exception('non-0 exit code updating working directory; %d'
                            % res)


def run_tests(ssh_client, python_version, test_flags=None):
    """Run tests on a remote Linux machine via an SSH client."""
    test_flags = test_flags or []

    print('running tests')

    if python_version == 'system2':
        python = '/usr/bin/python2'
    elif python_version == 'system3':
        python = '/usr/bin/python3'
    elif python_version.startswith('pypy'):
        python = '/hgdev/pyenv/shims/%s' % python_version
    else:
        python = '/hgdev/pyenv/shims/python%s' % python_version

    test_flags = ' '.join(shlex.quote(a) for a in test_flags)

    command = (
        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
            python, test_flags))

    chan, stdin, stdout = exec_command(ssh_client, command)

    stdin.close()

    for line in stdout:
        print(line, end='')

    return chan.recv_exit_status()
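
How these helpers fit together, as a minimal driver sketch that is not part of this change: it assumes an `ec2_instance` object exposing the `ssh_client`, `public_ip_address`, and `ssh_private_key_path` attributes the functions above rely on, and a hypothetical `hgautomation.linux` import path for this module; the real automation harness wires this up elsewhere.

import pathlib

from hgautomation import linux  # assumed package layout for this module


def test_revision_on_instance(ec2_instance, source_path: pathlib.Path,
                              revision: str = '.'):
    # Format and mount a scratch filesystem at /hgwork and seed it from
    # /hgdev/src (runs PREPARE_HGDEV on the instance).
    linux.prepare_exec_environment(ec2_instance.ssh_client,
                                   filesystem='tmpfs')

    # Push the requested revision to the instance and update the remote
    # working copy to it (runs HG_UPDATE_CLEAN on the instance).
    linux.synchronize_hg(source_path, ec2_instance, revision)

    # Run the test harness under one of the pyenv-installed CPythons;
    # the return value is run-tests.py's exit status.
    return linux.run_tests(ec2_instance.ssh_client, '3.7', ['-j', '2'])

Because each helper streams remote stdout and surfaces the remote exit status, a driver like this only needs to inspect the value returned by run_tests().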