automation: upgrade packages in Linux environment...
Gregory Szorc, r43284:cbd94ee3 (default branch)
@@ -1,560 +1,560 @@
# linux.py - Linux specific automation functionality
#
# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# no-check-code because Python 3 native.

import os
import pathlib
import shlex
import subprocess
import tempfile

from .ssh import (
    exec_command,
)


# Linux distributions that are supported.
DISTROS = {
    'debian9',
    'ubuntu18.04',
    'ubuntu19.04',
}

INSTALL_PYTHONS = r'''
PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"

git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
pushd /hgdev/pyenv
git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
popd

export PYENV_ROOT="/hgdev/pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"

-# pip 19.0.3.
+# pip 19.2.3.
-PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
+PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
-wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
+wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
echo "${PIP_SHA256} get-pip.py" | sha256sum --check -

-VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
+VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
-VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
+VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
-wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
+wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -

for v in ${PYENV2_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
done

for v in ${PYENV3_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
done

pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
'''.lstrip().replace('\r\n', '\n')


INSTALL_RUST = r'''
RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -

chmod +x rustup-init
sudo -H -u hg -g hg ./rustup-init -y
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
'''


BOOTSTRAP_VIRTUALENV = r'''
/usr/bin/virtualenv /hgdev/venv-bootstrap

-HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
+HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
-HG_TARBALL=mercurial-4.9.1.tar.gz
+HG_TARBALL=mercurial-5.1.1.tar.gz

wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -

/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
'''.lstrip().replace('\r\n', '\n')
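
# Illustrative sketch, not part of linux.py: every artifact fetched above is
# pinned to a SHA-256 digest, so a version bump (as this commit does for
# get-pip.py, virtualenv, and Mercurial) also means recomputing the pin.
# A minimal way to compute such a digest locally before editing the script;
# the helper name is made up for illustration.
import hashlib
import urllib.request

def sha256_of_url(url):
    """Download a file and return its hex SHA-256 digest."""
    with urllib.request.urlopen(url) as resp:
        return hashlib.sha256(resp.read()).hexdigest()

# e.g. sha256_of_url('https://www.mercurial-scm.org/release/mercurial-5.1.1.tar.gz')
# should match the HG_SHA256 value used above.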


BOOTSTRAP_DEBIAN = r'''
#!/bin/bash

set -ex

DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
DEBIAN_VERSION=`cat /etc/debian_version`
LSB_RELEASE=`lsb_release -cs`

sudo /usr/sbin/groupadd hg
sudo /usr/sbin/groupadd docker
sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
sudo mkdir /home/hg/.ssh
sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
sudo chown -R hg:hg /home/hg/.ssh
sudo chmod 700 /home/hg/.ssh
sudo chmod 600 /home/hg/.ssh/authorized_keys

cat << EOF | sudo tee /etc/sudoers.d/90-hg
hg ALL=(ALL) NOPASSWD:ALL
EOF

sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade

# Install packages necessary to set up Docker Apt repo.
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
    apt-transport-https \
    gnupg

cat > docker-apt-key << EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
=0YYh
-----END PGP PUBLIC KEY BLOCK-----
EOF

sudo apt-key add docker-apt-key

if [ "$LSB_RELEASE" = "stretch" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
# Need backports for clang-format-6.0
deb http://deb.debian.org/debian stretch-backports main

# Sources are useful if we want to compile things locally.
deb-src http://deb.debian.org/debian stretch main
deb-src http://security.debian.org/debian-security stretch/updates main
deb-src http://deb.debian.org/debian stretch-updates main
deb-src http://deb.debian.org/debian stretch-backports main

deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
EOF

elif [ "$DISTRO" = "Ubuntu" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
EOF

fi

sudo apt-get update

PACKAGES="\
    btrfs-progs \
    build-essential \
    bzr \
    clang-format-6.0 \
    cvs \
    darcs \
    debhelper \
    devscripts \
    docker-ce \
    dpkg-dev \
    dstat \
    emacs \
    gettext \
    git \
    htop \
    iotop \
    jfsutils \
    libbz2-dev \
    libexpat1-dev \
    libffi-dev \
    libgdbm-dev \
    liblzma-dev \
    libncurses5-dev \
    libnss3-dev \
    libreadline-dev \
    libsqlite3-dev \
    libssl-dev \
    netbase \
    ntfs-3g \
    nvme-cli \
    pyflakes \
    pyflakes3 \
    pylint \
    pylint3 \
    python-all-dev \
    python-dev \
    python-docutils \
    python-fuzzywuzzy \
    python-pygments \
    python-subversion \
    python-vcr \
    python3-dev \
    python3-docutils \
    python3-fuzzywuzzy \
    python3-pygments \
    python3-vcr \
    rsync \
    sqlite3 \
    subversion \
    tcl-dev \
    tk-dev \
    tla \
    unzip \
    uuid-dev \
    vim \
    virtualenv \
    wget \
    xfsprogs \
    zip \
    zlib1g-dev"

if [ "$LSB_RELEASE" = "stretch" ]; then
    PACKAGES="$PACKAGES linux-perf"
elif [ "$DISTRO" = "Ubuntu" ]; then
    PACKAGES="$PACKAGES linux-tools-common"
fi

# Ubuntu 19.04 removes monotone.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES monotone"
fi

sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES

# Create clang-format symlink so test harness finds it.
sudo update-alternatives --install /usr/bin/clang-format clang-format \
    /usr/bin/clang-format-6.0 1000

sudo mkdir /hgdev
# Will be normalized to hg:hg later.
sudo chown `whoami` /hgdev

{install_rust}

cp requirements-py2.txt /hgdev/requirements-py2.txt
cp requirements-py3.txt /hgdev/requirements-py3.txt

# Disable the pip version check because it uses the network and can
# be annoying.
cat << EOF | sudo tee -a /etc/pip.conf
[global]
disable-pip-version-check = True
EOF

{install_pythons}
{bootstrap_virtualenv}

/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src

# Mark the repo as non-publishing.
cat >> /hgdev/src/.hg/hgrc << EOF
[phases]
publish = false
EOF

sudo chown -R hg:hg /hgdev
'''.lstrip().format(
    install_rust=INSTALL_RUST,
    install_pythons=INSTALL_PYTHONS,
    bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
).replace('\r\n', '\n')
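
# Illustrative sketch, not part of linux.py: BOOTSTRAP_DEBIAN is the only one
# of these scripts passed through str.format() (to splice in the Rust, Python,
# and virtualenv snippets), which is why the awk program near its top is
# written with doubled braces: literal braces must be escaped so format()
# does not treat them as replacement fields. PREPARE_HGDEV and HG_UPDATE_CLEAN
# are never formatted, so their shell braces stay single.
template = "DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`\n{install_rust}"
rendered = template.format(install_rust='# rust setup goes here')
# Doubled braces collapse to single ones; {install_rust} is substituted.
assert "awk -F= '{print $2}'" in rendered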


# Prepares /hgdev for operations.
PREPARE_HGDEV = '''
#!/bin/bash

set -e

FS=$1

ensure_device() {
    if [ -z "${DEVICE}" ]; then
        echo "could not find block device to format"
        exit 1
    fi
}

# Determine device to partition for extra filesystem.
# If only 1 volume is present, it will be the root volume and
# should be /dev/nvme0. If multiple volumes are present, the
# root volume could be nvme0 or nvme1. Use whichever one doesn't have
# a partition.
if [ -e /dev/nvme1n1 ]; then
    if [ -e /dev/nvme0n1p1 ]; then
        DEVICE=/dev/nvme1n1
    else
        DEVICE=/dev/nvme0n1
    fi
else
    DEVICE=
fi

sudo mkdir /hgwork

if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
    ensure_device
    echo "creating ${FS} filesystem on ${DEVICE}"
fi

if [ "${FS}" = "default" ]; then
    :

elif [ "${FS}" = "btrfs" ]; then
    sudo mkfs.btrfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext3" ]; then
    # lazy_journal_init speeds up filesystem creation at the expense of
    # integrity if things crash. We are an ephemeral instance, so we don't
    # care about integrity.
    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext4" ]; then
    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "jfs" ]; then
    sudo mkfs.jfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "tmpfs" ]; then
    echo "creating tmpfs volume in /hgwork"
    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork

elif [ "${FS}" = "xfs" ]; then
    sudo mkfs.xfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

else
    echo "unsupported filesystem: ${FS}"
    exit 1
fi

echo "/hgwork ready"

sudo chown hg:hg /hgwork
mkdir /hgwork/tmp
chown hg:hg /hgwork/tmp

rsync -a /hgdev/src /hgwork/
'''.lstrip().replace('\r\n', '\n')


HG_UPDATE_CLEAN = '''
set -ex

HG=/hgdev/venv-bootstrap/bin/hg

cd /hgwork/src
${HG} --config extensions.purge= purge --all
${HG} update -C $1
${HG} log -r .
'''.lstrip().replace('\r\n', '\n')


def prepare_exec_environment(ssh_client, filesystem='default'):
    """Prepare an EC2 instance to execute things.

    The AMI has an ``/hgdev`` bootstrapped with various Python installs
    and a clone of the Mercurial repo.

    In EC2, EBS volumes launched from snapshots have wonky performance behavior.
    Notably, blocks have to be copied on first access, which makes volume
    I/O extremely slow on fresh volumes.

    Furthermore, we may want to run operations, tests, etc on alternative
    filesystems so we examine behavior on different filesystems.

    This function is used to facilitate executing operations on alternate
    volumes.
    """
    sftp = ssh_client.open_sftp()

    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
        fh.write(PREPARE_HGDEV)
        fh.chmod(0o0777)

    command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
    chan, stdin, stdout = exec_command(ssh_client, command)
    stdin.close()

    for line in stdout:
        print(line, end='')

    res = chan.recv_exit_status()

    if res:
        raise Exception('non-0 exit code updating working directory; %d'
                        % res)

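
# Illustrative sketch, not part of linux.py: prepare_exec_environment() only
# needs an SSH client object exposing open_sftp() plus whatever exec_command()
# from .ssh expects; a connected paramiko.SSHClient is assumed here, and the
# host address and key path are placeholders.
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect('203.0.113.10', username='hg', key_filename='/path/to/key.pem')

# Format the instance's scratch NVMe device as ext4, mount it at /hgwork, and
# copy /hgdev/src into it via the PREPARE_HGDEV script uploaded above.
prepare_exec_environment(client, filesystem='ext4')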

def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
    """Synchronize a local Mercurial source path to remote EC2 instance."""

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = pathlib.Path(temp_dir)

        ssh_dir = temp_dir / '.ssh'
        ssh_dir.mkdir()
        ssh_dir.chmod(0o0700)

        public_ip = ec2_instance.public_ip_address

        ssh_config = ssh_dir / 'config'

        with ssh_config.open('w', encoding='utf-8') as fh:
            fh.write('Host %s\n' % public_ip)
            fh.write(' User hg\n')
            fh.write(' StrictHostKeyChecking no\n')
            fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
            fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)

        if not (source_path / '.hg').is_dir():
            raise Exception('%s is not a Mercurial repository; synchronization '
                            'not yet supported' % source_path)

        env = dict(os.environ)
        env['HGPLAIN'] = '1'
        env['HGENCODING'] = 'utf-8'

        hg_bin = source_path / 'hg'

        res = subprocess.run(
            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
            cwd=str(source_path), env=env, check=True, capture_output=True)

        full_revision = res.stdout.decode('ascii')

        args = [
            'python2.7', str(hg_bin),
            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
            # Also ensure .hgtags changes are present so auto version
            # calculation works.
            'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
            'ssh://%s//hgwork/src' % public_ip,
        ]

        res = subprocess.run(args, cwd=str(source_path), env=env)

        # Allow 1 (no-op) to not trigger error.
        if res.returncode not in (0, 1):
            res.check_returncode()

        # TODO support synchronizing dirty working directory.

        sftp = ec2_instance.ssh_client.open_sftp()

        with sftp.open('/hgdev/hgup', 'wb') as fh:
            fh.write(HG_UPDATE_CLEAN)
            fh.chmod(0o0700)

        chan, stdin, stdout = exec_command(
            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
        stdin.close()

        for line in stdout:
            print(line, end='')

        res = chan.recv_exit_status()

        if res:
            raise Exception('non-0 exit code updating working directory; %d'
                            % res)

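
# Illustrative sketch, not part of linux.py: synchronize_hg() reads only three
# attributes from its ec2_instance argument (public_ip_address,
# ssh_private_key_path, ssh_client), so a simple stand-in object is enough to
# exercise it. Values below are placeholders, and the local checkout must
# contain the in-repo `hg` script since the push is driven via python2.7.
import pathlib
import types

instance = types.SimpleNamespace(
    public_ip_address='203.0.113.10',
    ssh_private_key_path='/path/to/key.pem',
    ssh_client=client,  # the connected client from the sketch above
)

synchronize_hg(pathlib.Path('~/src/hg').expanduser(), instance, revision='.')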

def run_tests(ssh_client, python_version, test_flags=None):
    """Run tests on a remote Linux machine via an SSH client."""
    test_flags = test_flags or []

    print('running tests')

    if python_version == 'system2':
        python = '/usr/bin/python2'
    elif python_version == 'system3':
        python = '/usr/bin/python3'
    elif python_version.startswith('pypy'):
        python = '/hgdev/pyenv/shims/%s' % python_version
    else:
        python = '/hgdev/pyenv/shims/python%s' % python_version

    test_flags = ' '.join(shlex.quote(a) for a in test_flags)

    command = (
        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
            python, test_flags))

    chan, stdin, stdout = exec_command(ssh_client, command)

    stdin.close()

    for line in stdout:
        print(line, end='')

    return chan.recv_exit_status()
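
# Illustrative sketch, not part of linux.py: a full cycle using the functions
# above, continuing the placeholder `client` and `instance` objects from the
# earlier sketches. run_tests() maps '3.7' to the pyenv shim installed by
# INSTALL_PYTHONS and returns run-tests.py's exit status.
prepare_exec_environment(client, filesystem='tmpfs')
synchronize_hg(pathlib.Path('~/src/hg').expanduser(), instance, revision='.')

rc = run_tests(client, '3.7', test_flags=['-j', '2'])
raise SystemExit(rc)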