automation: upgrade packages in Linux environment...
Gregory Szorc
r43284:cbd94ee3 default
@@ -1,560 +1,560 @@
1 1 # linux.py - Linux specific automation functionality
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import os
11 11 import pathlib
12 12 import shlex
13 13 import subprocess
14 14 import tempfile
15 15
16 16 from .ssh import (
17 17 exec_command,
18 18 )
19 19
20 20
21 21 # Linux distributions that are supported.
22 22 DISTROS = {
23 23 'debian9',
24 24 'ubuntu18.04',
25 25 'ubuntu19.04',
26 26 }
27 27
28 28 INSTALL_PYTHONS = r'''
29 29 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
30 30 PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
31 31
32 32 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
33 33 pushd /hgdev/pyenv
34 34 git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
35 35 popd
36 36
37 37 export PYENV_ROOT="/hgdev/pyenv"
38 38 export PATH="$PYENV_ROOT/bin:$PATH"
39 39
40 # pip 19.0.3.
41 PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
42 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
40 # pip 19.2.3.
41 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
42 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
43 43 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
44 44
45 VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
46 VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
47 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
45 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
46 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
47 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
48 48 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
49 49
50 50 for v in ${PYENV2_VERSIONS}; do
51 51 pyenv install -v ${v}
52 52 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
53 53 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
54 54 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
55 55 done
56 56
57 57 for v in ${PYENV3_VERSIONS}; do
58 58 pyenv install -v ${v}
59 59 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
60 60 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
61 61 done
62 62
63 63 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
64 64 '''.lstrip().replace('\r\n', '\n')
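Every artifact the bootstrap scripts download (get-pip.py above, the virtualenv tarball, and rustup-init and the Mercurial tarball further down) is pinned to a SHA-256 digest and verified with sha256sum --check before use. Below is a minimal Python sketch of the same verify-before-use pattern; the URL and digest are the ones pinned above, while the fetch_verified helper is illustrative and not part of this module.

import hashlib
import urllib.request

def fetch_verified(url, sha256, dest):
    """Download url to dest, refusing to keep it unless the SHA-256 matches."""
    data = urllib.request.urlopen(url).read()
    digest = hashlib.sha256(data).hexdigest()
    if digest != sha256:
        raise ValueError('checksum mismatch: got %s, expected %s' % (digest, sha256))
    with open(dest, 'wb') as fh:
        fh.write(data)

# Same artifact and digest that INSTALL_PYTHONS pins above.
fetch_verified(
    'https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py',
    '57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe',
    'get-pip.py')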
65 65
66 66
67 67 INSTALL_RUST = r'''
68 68 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
69 69 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
70 70 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
71 71
72 72 chmod +x rustup-init
73 73 sudo -H -u hg -g hg ./rustup-init -y
74 74 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
75 75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
76 76 '''
77 77
78 78
79 79 BOOTSTRAP_VIRTUALENV = r'''
80 80 /usr/bin/virtualenv /hgdev/venv-bootstrap
81 81
82 HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
83 HG_TARBALL=mercurial-4.9.1.tar.gz
82 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
83 HG_TARBALL=mercurial-5.1.1.tar.gz
84 84
85 85 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
86 86 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
87 87
88 88 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
89 89 '''.lstrip().replace('\r\n', '\n')
90 90
91 91
92 92 BOOTSTRAP_DEBIAN = r'''
93 93 #!/bin/bash
94 94
95 95 set -ex
96 96
97 97 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
98 98 DEBIAN_VERSION=`cat /etc/debian_version`
99 99 LSB_RELEASE=`lsb_release -cs`
100 100
101 101 sudo /usr/sbin/groupadd hg
102 102 sudo /usr/sbin/groupadd docker
103 103 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
104 104 sudo mkdir /home/hg/.ssh
105 105 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
106 106 sudo chown -R hg:hg /home/hg/.ssh
107 107 sudo chmod 700 /home/hg/.ssh
108 108 sudo chmod 600 /home/hg/.ssh/authorized_keys
109 109
110 110 cat << EOF | sudo tee /etc/sudoers.d/90-hg
111 111 hg ALL=(ALL) NOPASSWD:ALL
112 112 EOF
113 113
114 114 sudo apt-get update
115 115 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
116 116
117 117 # Install packages necessary to set up Docker Apt repo.
118 118 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
119 119 apt-transport-https \
120 120 gnupg
121 121
122 122 cat > docker-apt-key << EOF
123 123 -----BEGIN PGP PUBLIC KEY BLOCK-----
124 124
125 125 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
126 126 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
127 127 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
128 128 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
129 129 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
130 130 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
131 131 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
132 132 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
133 133 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
134 134 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
135 135 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
136 136 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
137 137 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
138 138 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
139 139 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
140 140 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
141 141 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
142 142 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
143 143 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
144 144 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
145 145 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
146 146 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
147 147 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
148 148 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
149 149 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
150 150 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
151 151 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
152 152 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
153 153 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
154 154 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
155 155 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
156 156 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
157 157 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
158 158 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
159 159 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
160 160 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
161 161 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
162 162 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
163 163 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
164 164 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
165 165 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
166 166 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
167 167 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
168 168 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
169 169 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
170 170 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
171 171 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
172 172 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
173 173 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
174 174 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
175 175 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
176 176 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
177 177 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
178 178 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
179 179 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
180 180 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
181 181 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
182 182 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
183 183 =0YYh
184 184 -----END PGP PUBLIC KEY BLOCK-----
185 185 EOF
186 186
187 187 sudo apt-key add docker-apt-key
188 188
189 189 if [ "$LSB_RELEASE" = "stretch" ]; then
190 190 cat << EOF | sudo tee -a /etc/apt/sources.list
191 191 # Need backports for clang-format-6.0
192 192 deb http://deb.debian.org/debian stretch-backports main
193 193
194 194 # Sources are useful if we want to compile things locally.
195 195 deb-src http://deb.debian.org/debian stretch main
196 196 deb-src http://security.debian.org/debian-security stretch/updates main
197 197 deb-src http://deb.debian.org/debian stretch-updates main
198 198 deb-src http://deb.debian.org/debian stretch-backports main
199 199
200 200 deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
201 201 EOF
202 202
203 203 elif [ "$DISTRO" = "Ubuntu" ]; then
204 204 cat << EOF | sudo tee -a /etc/apt/sources.list
205 205 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
206 206 EOF
207 207
208 208 fi
209 209
210 210 sudo apt-get update
211 211
212 212 PACKAGES="\
213 213 btrfs-progs \
214 214 build-essential \
215 215 bzr \
216 216 clang-format-6.0 \
217 217 cvs \
218 218 darcs \
219 219 debhelper \
220 220 devscripts \
221 221 docker-ce \
222 222 dpkg-dev \
223 223 dstat \
224 224 emacs \
225 225 gettext \
226 226 git \
227 227 htop \
228 228 iotop \
229 229 jfsutils \
230 230 libbz2-dev \
231 231 libexpat1-dev \
232 232 libffi-dev \
233 233 libgdbm-dev \
234 234 liblzma-dev \
235 235 libncurses5-dev \
236 236 libnss3-dev \
237 237 libreadline-dev \
238 238 libsqlite3-dev \
239 239 libssl-dev \
240 240 netbase \
241 241 ntfs-3g \
242 242 nvme-cli \
243 243 pyflakes \
244 244 pyflakes3 \
245 245 pylint \
246 246 pylint3 \
247 247 python-all-dev \
248 248 python-dev \
249 249 python-docutils \
250 250 python-fuzzywuzzy \
251 251 python-pygments \
252 252 python-subversion \
253 253 python-vcr \
254 254 python3-dev \
255 255 python3-docutils \
256 256 python3-fuzzywuzzy \
257 257 python3-pygments \
258 258 python3-vcr \
259 259 rsync \
260 260 sqlite3 \
261 261 subversion \
262 262 tcl-dev \
263 263 tk-dev \
264 264 tla \
265 265 unzip \
266 266 uuid-dev \
267 267 vim \
268 268 virtualenv \
269 269 wget \
270 270 xfsprogs \
271 271 zip \
272 272 zlib1g-dev"
273 273
274 274 if [ "$LSB_RELEASE" = "stretch" ]; then
275 275 PACKAGES="$PACKAGES linux-perf"
276 276 elif [ "$DISTRO" = "Ubuntu" ]; then
277 277 PACKAGES="$PACKAGES linux-tools-common"
278 278 fi
279 279
280 280 # Ubuntu 19.04 removes monotone.
281 281 if [ "$LSB_RELEASE" != "disco" ]; then
282 282 PACKAGES="$PACKAGES monotone"
283 283 fi
284 284
285 285 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
286 286
287 287 # Create clang-format symlink so test harness finds it.
288 288 sudo update-alternatives --install /usr/bin/clang-format clang-format \
289 289 /usr/bin/clang-format-6.0 1000
290 290
291 291 sudo mkdir /hgdev
292 292 # Will be normalized to hg:hg later.
293 293 sudo chown `whoami` /hgdev
294 294
295 295 {install_rust}
296 296
297 297 cp requirements-py2.txt /hgdev/requirements-py2.txt
298 298 cp requirements-py3.txt /hgdev/requirements-py3.txt
299 299
300 300 # Disable the pip version check because it uses the network and can
301 301 # be annoying.
302 302 cat << EOF | sudo tee -a /etc/pip.conf
303 303 [global]
304 304 disable-pip-version-check = True
305 305 EOF
306 306
307 307 {install_pythons}
308 308 {bootstrap_virtualenv}
309 309
310 310 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
311 311
312 312 # Mark the repo as non-publishing.
313 313 cat >> /hgdev/src/.hg/hgrc << EOF
314 314 [phases]
315 315 publish = false
316 316 EOF
317 317
318 318 sudo chown -R hg:hg /hgdev
319 319 '''.lstrip().format(
320 320 install_rust=INSTALL_RUST,
321 321 install_pythons=INSTALL_PYTHONS,
322 322 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
323 323 ).replace('\r\n', '\n')
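BOOTSTRAP_DEBIAN is passed through str.format() to splice in {install_rust}, {install_pythons} and {bootstrap_virtualenv}, which is why literal shell braces such as awk's '{{print $2}}' are doubled in the template. A small sketch of that escaping behavior, using a made-up placeholder name:

# Braces destined for the shell must be doubled so str.format() leaves them alone.
template = "DISTRO=`awk -F= '{{print $2}}' /etc/lsb-release`\n{payload}"
print(template.format(payload='echo bootstrap payload goes here'))
# Prints:
#   DISTRO=`awk -F= '{print $2}' /etc/lsb-release`
#   echo bootstrap payload goes here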
324 324
325 325
326 326 # Prepares /hgdev for operations.
327 327 PREPARE_HGDEV = '''
328 328 #!/bin/bash
329 329
330 330 set -e
331 331
332 332 FS=$1
333 333
334 334 ensure_device() {
335 335 if [ -z "${DEVICE}" ]; then
336 336 echo "could not find block device to format"
337 337 exit 1
338 338 fi
339 339 }
340 340
341 341 # Determine device to partition for extra filesystem.
342 342 # If only 1 volume is present, it will be the root volume and
343 343 # should be /dev/nvme0. If multiple volumes are present, the
344 344 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
345 345 # a partition.
346 346 if [ -e /dev/nvme1n1 ]; then
347 347 if [ -e /dev/nvme0n1p1 ]; then
348 348 DEVICE=/dev/nvme1n1
349 349 else
350 350 DEVICE=/dev/nvme0n1
351 351 fi
352 352 else
353 353 DEVICE=
354 354 fi
355 355
356 356 sudo mkdir /hgwork
357 357
358 358 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
359 359 ensure_device
360 360 echo "creating ${FS} filesystem on ${DEVICE}"
361 361 fi
362 362
363 363 if [ "${FS}" = "default" ]; then
364 364 :
365 365
366 366 elif [ "${FS}" = "btrfs" ]; then
367 367 sudo mkfs.btrfs ${DEVICE}
368 368 sudo mount ${DEVICE} /hgwork
369 369
370 370 elif [ "${FS}" = "ext3" ]; then
371 371 # lazy_journal_init speeds up filesystem creation at the expense of
372 372 # integrity if things crash. We are an ephemeral instance, so we don't
373 373 # care about integrity.
374 374 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
375 375 sudo mount ${DEVICE} /hgwork
376 376
377 377 elif [ "${FS}" = "ext4" ]; then
378 378 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
379 379 sudo mount ${DEVICE} /hgwork
380 380
381 381 elif [ "${FS}" = "jfs" ]; then
382 382 sudo mkfs.jfs ${DEVICE}
383 383 sudo mount ${DEVICE} /hgwork
384 384
385 385 elif [ "${FS}" = "tmpfs" ]; then
386 386 echo "creating tmpfs volume in /hgwork"
387 387 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
388 388
389 389 elif [ "${FS}" = "xfs" ]; then
390 390 sudo mkfs.xfs ${DEVICE}
391 391 sudo mount ${DEVICE} /hgwork
392 392
393 393 else
394 394 echo "unsupported filesystem: ${FS}"
395 395 exit 1
396 396 fi
397 397
398 398 echo "/hgwork ready"
399 399
400 400 sudo chown hg:hg /hgwork
401 401 mkdir /hgwork/tmp
402 402 chown hg:hg /hgwork/tmp
403 403
404 404 rsync -a /hgdev/src /hgwork/
405 405 '''.lstrip().replace('\r\n', '\n')
406 406
407 407
408 408 HG_UPDATE_CLEAN = '''
409 409 set -ex
410 410
411 411 HG=/hgdev/venv-bootstrap/bin/hg
412 412
413 413 cd /hgwork/src
414 414 ${HG} --config extensions.purge= purge --all
415 415 ${HG} update -C $1
416 416 ${HG} log -r .
417 417 '''.lstrip().replace('\r\n', '\n')
418 418
419 419
420 420 def prepare_exec_environment(ssh_client, filesystem='default'):
421 421 """Prepare an EC2 instance to execute things.
422 422
423 423 The AMI has an ``/hgdev`` bootstrapped with various Python installs
424 424 and a clone of the Mercurial repo.
425 425
426 426 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
427 427 Notably, blocks have to be copied on first access, which makes volume
428 428 I/O extremely slow on fresh volumes.
429 429
430 430 Furthermore, we may want to run operations, tests, etc. on alternative
431 431 filesystems so we can examine behavior on different filesystems.
432 432
433 433 This function is used to facilitate executing operations on alternate
434 434 volumes.
435 435 """
436 436 sftp = ssh_client.open_sftp()
437 437
438 438 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
439 439 fh.write(PREPARE_HGDEV)
440 440 fh.chmod(0o0777)
441 441
442 442 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
443 443 chan, stdin, stdout = exec_command(ssh_client, command)
444 444 stdin.close()
445 445
446 446 for line in stdout:
447 447 print(line, end='')
448 448
449 449 res = chan.recv_exit_status()
450 450
451 451 if res:
452 452 raise Exception('non-0 exit code preparing execution environment: %d'
453 453 % res)
454 454
455 455
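For orientation, here is a hedged sketch of how prepare_exec_environment might be driven. The open_sftp() call above suggests ssh_client is a paramiko SSHClient; the host address, key path, and import path below are placeholders rather than the project's real driver code.

import paramiko

# Import path assumed from the repository layout; adjust if the package differs.
from hgautomation.linux import prepare_exec_environment

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Placeholder address and key; a real run targets the launched EC2 instance.
client.connect('203.0.113.10', username='hg', key_filename='/path/to/key.pem')

# Format /hgwork as ext4 on the spare NVMe volume and seed it from /hgdev/src.
prepare_exec_environment(client, filesystem='ext4')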
456 456 def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
457 457 """Synchronize a local Mercurial source path to remote EC2 instance."""
458 458
459 459 with tempfile.TemporaryDirectory() as temp_dir:
460 460 temp_dir = pathlib.Path(temp_dir)
461 461
462 462 ssh_dir = temp_dir / '.ssh'
463 463 ssh_dir.mkdir()
464 464 ssh_dir.chmod(0o0700)
465 465
466 466 public_ip = ec2_instance.public_ip_address
467 467
468 468 ssh_config = ssh_dir / 'config'
469 469
470 470 with ssh_config.open('w', encoding='utf-8') as fh:
471 471 fh.write('Host %s\n' % public_ip)
472 472 fh.write(' User hg\n')
473 473 fh.write(' StrictHostKeyChecking no\n')
474 474 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
475 475 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
476 476
477 477 if not (source_path / '.hg').is_dir():
478 478 raise Exception('%s is not a Mercurial repository; synchronization '
479 479 'not yet supported' % source_path)
480 480
481 481 env = dict(os.environ)
482 482 env['HGPLAIN'] = '1'
483 483 env['HGENCODING'] = 'utf-8'
484 484
485 485 hg_bin = source_path / 'hg'
486 486
487 487 res = subprocess.run(
488 488 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
489 489 cwd=str(source_path), env=env, check=True, capture_output=True)
490 490
491 491 full_revision = res.stdout.decode('ascii')
492 492
493 493 args = [
494 494 'python2.7', str(hg_bin),
495 495 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
496 496 '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
497 497 # Also ensure .hgtags changes are present so auto version
498 498 # calculation works.
499 499 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
500 500 'ssh://%s//hgwork/src' % public_ip,
501 501 ]
502 502
503 503 res = subprocess.run(args, cwd=str(source_path), env=env)
504 504
505 505 # Allow 1 (no-op) to not trigger error.
506 506 if res.returncode not in (0, 1):
507 507 res.check_returncode()
508 508
509 509 # TODO support synchronizing dirty working directory.
510 510
511 511 sftp = ec2_instance.ssh_client.open_sftp()
512 512
513 513 with sftp.open('/hgdev/hgup', 'wb') as fh:
514 514 fh.write(HG_UPDATE_CLEAN)
515 515 fh.chmod(0o0700)
516 516
517 517 chan, stdin, stdout = exec_command(
518 518 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
519 519 stdin.close()
520 520
521 521 for line in stdout:
522 522 print(line, end='')
523 523
524 524 res = chan.recv_exit_status()
525 525
526 526 if res:
527 527 raise Exception('non-0 exit code updating working directory; %d'
528 528 % res)
529 529
530 530
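synchronize_hg only relies on three attributes of its ec2_instance argument. A sketch of that implied interface follows, written as a typing.Protocol; this requires Python 3.8+ and is purely illustrative, not something this module defines.

import typing

class EC2InstanceLike(typing.Protocol):
    """Interface implied by the attribute accesses in synchronize_hg (illustrative only)."""
    public_ip_address: str
    ssh_private_key_path: str
    ssh_client: typing.Any  # paramiko-style client exposing open_sftp()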
531 531 def run_tests(ssh_client, python_version, test_flags=None):
532 532 """Run tests on a remote Linux machine via an SSH client."""
533 533 test_flags = test_flags or []
534 534
535 535 print('running tests')
536 536
537 537 if python_version == 'system2':
538 538 python = '/usr/bin/python2'
539 539 elif python_version == 'system3':
540 540 python = '/usr/bin/python3'
541 541 elif python_version.startswith('pypy'):
542 542 python = '/hgdev/pyenv/shims/%s' % python_version
543 543 else:
544 544 python = '/hgdev/pyenv/shims/python%s' % python_version
545 545
546 546 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
547 547
548 548 command = (
549 549 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
550 550 'cd /hgwork/src/tests && %s run-tests.py %s"' % (
551 551 python, test_flags))
552 552
553 553 chan, stdin, stdout = exec_command(ssh_client, command)
554 554
555 555 stdin.close()
556 556
557 557 for line in stdout:
558 558 print(line, end='')
559 559
560 560 return chan.recv_exit_status()
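Putting the helpers together, an end-to-end run would prepare /hgwork, push the local revision, then launch the test harness. The driver below is a hedged sketch with an assumed import path and example test flags, not the project's actual entry point.

import pathlib

# Import path assumed; these names are the functions defined in this file.
from hgautomation.linux import (
    prepare_exec_environment,
    run_tests,
    synchronize_hg,
)

def run_linux_tests(ec2_instance, source_path: pathlib.Path, revision: str = '.'):
    """Illustrative driver chaining the helpers above; not the project's real CLI."""
    # Create /hgwork (tmpfs here) and copy the /hgdev clone into it.
    prepare_exec_environment(ec2_instance.ssh_client, filesystem='tmpfs')
    # Push the requested local revision and update the remote checkout.
    synchronize_hg(source_path, ec2_instance, revision=revision)
    # Run the suite under CPython 3.7 via the pyenv shim installed at bootstrap.
    return run_tests(ec2_instance.ssh_client, '3.7', ['--jobs', '2'])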