automation: update rust in Linux environment...
Gregory Szorc
r47968:2df655f6 default
@@ -1,609 +1,609 @@
1 1 # linux.py - Linux specific automation functionality
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import os
11 11 import pathlib
12 12 import shlex
13 13 import subprocess
14 14 import tempfile
15 15
16 16 from .ssh import exec_command
17 17
18 18
19 19 # Linux distributions that are supported.
20 20 DISTROS = {
21 21 'debian9',
22 22 'debian10',
23 23 'ubuntu18.04',
24 24 'ubuntu19.04',
25 25 }
26 26
27 27 INSTALL_PYTHONS = r'''
28 28 PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
29 29 PYENV3_VERSIONS="3.5.10 3.6.13 3.7.10 3.8.10 3.9.5 pypy3.5-7.0.0 pypy3.6-7.3.3 pypy3.7-7.3.3"
30 30
31 31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
32 32 pushd /hgdev/pyenv
33 33 git checkout 328fd42c3a2fbf14ae46dae2021a087fe27ba7e2
34 34 popd
35 35
36 36 export PYENV_ROOT="/hgdev/pyenv"
37 37 export PATH="$PYENV_ROOT/bin:$PATH"
38 38
39 39 # pip 19.2.3.
40 40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
41 41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
42 42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
43 43
44 44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
45 45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
46 46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
47 47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
48 48
49 49 for v in ${PYENV2_VERSIONS}; do
50 50 pyenv install -v ${v}
51 51 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
52 52 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
53 53 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
54 54 done
55 55
56 56 for v in ${PYENV3_VERSIONS}; do
57 57 pyenv install -v ${v}
58 58 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
59 59
60 60 case ${v} in
61 61 3.5.*)
62 62 REQUIREMENTS=requirements-py3.5.txt
63 63 ;;
64 64 pypy3.5*)
65 65 REQUIREMENTS=requirements-py3.5.txt
66 66 ;;
67 67 *)
68 68 REQUIREMENTS=requirements-py3.txt
69 69 ;;
70 70 esac
71 71
72 72 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/${REQUIREMENTS}
73 73 done
74 74
75 75 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
76 76 '''.lstrip().replace(
77 77 '\r\n', '\n'
78 78 )
79 79
80 80
81 81 INSTALL_RUST = r'''
82 82 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
83 83 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
84 84 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
85 85
86 86 chmod +x rustup-init
87 87 sudo -H -u hg -g hg ./rustup-init -y
88 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.46.0
88 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.41.1 1.52.0
89 89 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
90 90
91 91 sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.10.3 pyoxidizer
92 92 '''
93 93
94 94
95 95 BOOTSTRAP_VIRTUALENV = r'''
96 96 /usr/bin/virtualenv /hgdev/venv-bootstrap
97 97
98 98 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
99 99 HG_TARBALL=mercurial-5.1.1.tar.gz
100 100
101 101 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
102 102 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
103 103
104 104 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
105 105 '''.lstrip().replace(
106 106 '\r\n', '\n'
107 107 )
108 108
109 109
110 110 BOOTSTRAP_DEBIAN = (
111 111 r'''
112 112 #!/bin/bash
113 113
114 114 set -ex
115 115
116 116 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
117 117 DEBIAN_VERSION=`cat /etc/debian_version`
118 118 LSB_RELEASE=`lsb_release -cs`
119 119
120 120 sudo /usr/sbin/groupadd hg
121 121 sudo /usr/sbin/groupadd docker
122 122 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
123 123 sudo mkdir /home/hg/.ssh
124 124 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
125 125 sudo chown -R hg:hg /home/hg/.ssh
126 126 sudo chmod 700 /home/hg/.ssh
127 127 sudo chmod 600 /home/hg/.ssh/authorized_keys
128 128
129 129 cat << EOF | sudo tee /etc/sudoers.d/90-hg
130 130 hg ALL=(ALL) NOPASSWD:ALL
131 131 EOF
132 132
133 133 sudo apt-get update
134 134 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
135 135
136 136 # Install packages necessary to set up Docker Apt repo.
137 137 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
138 138 apt-transport-https \
139 139 gnupg
140 140
141 141 cat > docker-apt-key << EOF
142 142 -----BEGIN PGP PUBLIC KEY BLOCK-----
143 143
144 144 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
145 145 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
146 146 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
147 147 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
148 148 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
149 149 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
150 150 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
151 151 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
152 152 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
153 153 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
154 154 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
155 155 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
156 156 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
157 157 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
158 158 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
159 159 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
160 160 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
161 161 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
162 162 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
163 163 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
164 164 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
165 165 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
166 166 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
167 167 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
168 168 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
169 169 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
170 170 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
171 171 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
172 172 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
173 173 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
174 174 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
175 175 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
176 176 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
177 177 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
178 178 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
179 179 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
180 180 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
181 181 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
182 182 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
183 183 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
184 184 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
185 185 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
186 186 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
187 187 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
188 188 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
189 189 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
190 190 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
191 191 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
192 192 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
193 193 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
194 194 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
195 195 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
196 196 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
197 197 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
198 198 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
199 199 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
200 200 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
201 201 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
202 202 =0YYh
203 203 -----END PGP PUBLIC KEY BLOCK-----
204 204 EOF
205 205
206 206 sudo apt-key add docker-apt-key
207 207
208 208 if [ "$LSB_RELEASE" = "stretch" ]; then
209 209 cat << EOF | sudo tee -a /etc/apt/sources.list
210 210 # Need backports for clang-format-6.0
211 211 deb http://deb.debian.org/debian stretch-backports main
212 212 EOF
213 213 fi
214 214
215 215 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
216 216 cat << EOF | sudo tee -a /etc/apt/sources.list
217 217 # Sources are useful if we want to compile things locally.
218 218 deb-src http://deb.debian.org/debian $LSB_RELEASE main
219 219 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
220 220 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
221 221 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
222 222
223 223 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
224 224 EOF
225 225
226 226 elif [ "$DISTRO" = "Ubuntu" ]; then
227 227 cat << EOF | sudo tee -a /etc/apt/sources.list
228 228 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
229 229 EOF
230 230
231 231 fi
232 232
233 233 sudo apt-get update
234 234
235 235 PACKAGES="\
236 236 awscli \
237 237 btrfs-progs \
238 238 build-essential \
239 239 bzr \
240 240 clang-format-6.0 \
241 241 cvs \
242 242 darcs \
243 243 debhelper \
244 244 devscripts \
245 245 docker-ce \
246 246 dpkg-dev \
247 247 dstat \
248 248 emacs \
249 249 gettext \
250 250 git \
251 251 htop \
252 252 iotop \
253 253 jfsutils \
254 254 libbz2-dev \
255 255 libexpat1-dev \
256 256 libffi-dev \
257 257 libgdbm-dev \
258 258 liblzma-dev \
259 259 libncurses5-dev \
260 260 libnss3-dev \
261 261 libreadline-dev \
262 262 libsqlite3-dev \
263 263 libssl-dev \
264 264 netbase \
265 265 ntfs-3g \
266 266 nvme-cli \
267 267 pyflakes \
268 268 pyflakes3 \
269 269 pylint \
270 270 pylint3 \
271 271 python-all-dev \
272 272 python-dev \
273 273 python-docutils \
274 274 python-fuzzywuzzy \
275 275 python-pygments \
276 276 python-subversion \
277 277 python-vcr \
278 278 python3-boto3 \
279 279 python3-dev \
280 280 python3-docutils \
281 281 python3-fuzzywuzzy \
282 282 python3-pygments \
283 283 python3-vcr \
284 284 python3-venv \
285 285 rsync \
286 286 sqlite3 \
287 287 subversion \
288 288 tcl-dev \
289 289 tk-dev \
290 290 tla \
291 291 unzip \
292 292 uuid-dev \
293 293 vim \
294 294 virtualenv \
295 295 wget \
296 296 xfsprogs \
297 297 zip \
298 298 zlib1g-dev"
299 299
300 300 if [ "$LSB_RELEASE" = "stretch" ]; then
301 301 PACKAGES="$PACKAGES linux-perf"
302 302 elif [ "$DISTRO" = "Ubuntu" ]; then
303 303 PACKAGES="$PACKAGES linux-tools-common"
304 304 fi
305 305
306 306 # Monotone only available in older releases.
307 307 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
308 308 PACKAGES="$PACKAGES monotone"
309 309 fi
310 310
311 311 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
312 312
313 313 # Create clang-format symlink so test harness finds it.
314 314 sudo update-alternatives --install /usr/bin/clang-format clang-format \
315 315 /usr/bin/clang-format-6.0 1000
316 316
317 317 sudo mkdir /hgdev
318 318 # Will be normalized to hg:hg later.
319 319 sudo chown `whoami` /hgdev
320 320
321 321 {install_rust}
322 322
323 323 cp requirements-*.txt /hgdev/
324 324
325 325 # Disable the pip version check because it uses the network and can
326 326 # be annoying.
327 327 cat << EOF | sudo tee -a /etc/pip.conf
328 328 [global]
329 329 disable-pip-version-check = True
330 330 EOF
331 331
332 332 {install_pythons}
333 333 {bootstrap_virtualenv}
334 334
335 335 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
336 336
337 337 # Mark the repo as non-publishing.
338 338 cat >> /hgdev/src/.hg/hgrc << EOF
339 339 [phases]
340 340 publish = false
341 341 EOF
342 342
343 343 sudo chown -R hg:hg /hgdev
344 344 '''.lstrip()
345 345 .format(
346 346 install_rust=INSTALL_RUST,
347 347 install_pythons=INSTALL_PYTHONS,
348 348 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
349 349 )
350 350 .replace('\r\n', '\n')
351 351 )
352 352
353 353
354 354 # Prepares /hgdev for operations.
355 355 PREPARE_HGDEV = '''
356 356 #!/bin/bash
357 357
358 358 set -e
359 359
360 360 FS=$1
361 361
362 362 ensure_device() {
363 363 if [ -z "${DEVICE}" ]; then
364 364 echo "could not find block device to format"
365 365 exit 1
366 366 fi
367 367 }
368 368
369 369 # Determine device to partition for extra filesystem.
370 370 # If only 1 volume is present, it will be the root volume and
371 371 # should be /dev/nvme0. If multiple volumes are present, the
372 372 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
373 373 # a partition.
374 374 if [ -e /dev/nvme1n1 ]; then
375 375 if [ -e /dev/nvme0n1p1 ]; then
376 376 DEVICE=/dev/nvme1n1
377 377 else
378 378 DEVICE=/dev/nvme0n1
379 379 fi
380 380 else
381 381 DEVICE=
382 382 fi
383 383
384 384 sudo mkdir /hgwork
385 385
386 386 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
387 387 ensure_device
388 388 echo "creating ${FS} filesystem on ${DEVICE}"
389 389 fi
390 390
391 391 if [ "${FS}" = "default" ]; then
392 392 :
393 393
394 394 elif [ "${FS}" = "btrfs" ]; then
395 395 sudo mkfs.btrfs ${DEVICE}
396 396 sudo mount ${DEVICE} /hgwork
397 397
398 398 elif [ "${FS}" = "ext3" ]; then
399 399 # lazy_journal_init speeds up filesystem creation at the expense of
400 400 # integrity if things crash. We are an ephemeral instance, so we don't
401 401 # care about integrity.
402 402 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
403 403 sudo mount ${DEVICE} /hgwork
404 404
405 405 elif [ "${FS}" = "ext4" ]; then
406 406 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
407 407 sudo mount ${DEVICE} /hgwork
408 408
409 409 elif [ "${FS}" = "jfs" ]; then
410 410 sudo mkfs.jfs ${DEVICE}
411 411 sudo mount ${DEVICE} /hgwork
412 412
413 413 elif [ "${FS}" = "tmpfs" ]; then
414 414 echo "creating tmpfs volume in /hgwork"
415 415 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
416 416
417 417 elif [ "${FS}" = "xfs" ]; then
418 418 sudo mkfs.xfs ${DEVICE}
419 419 sudo mount ${DEVICE} /hgwork
420 420
421 421 else
422 422 echo "unsupported filesystem: ${FS}"
423 423 exit 1
424 424 fi
425 425
426 426 echo "/hgwork ready"
427 427
428 428 sudo chown hg:hg /hgwork
429 429 mkdir /hgwork/tmp
430 430 chown hg:hg /hgwork/tmp
431 431
432 432 rsync -a /hgdev/src /hgwork/
433 433 '''.lstrip().replace(
434 434 '\r\n', '\n'
435 435 )
436 436
437 437
438 438 HG_UPDATE_CLEAN = '''
439 439 set -ex
440 440
441 441 HG=/hgdev/venv-bootstrap/bin/hg
442 442
443 443 cd /hgwork/src
444 444 ${HG} --config extensions.purge= purge --all
445 445 ${HG} update -C $1
446 446 ${HG} log -r .
447 447 '''.lstrip().replace(
448 448 '\r\n', '\n'
449 449 )
450 450
451 451
452 452 def prepare_exec_environment(ssh_client, filesystem='default'):
453 453 """Prepare an EC2 instance to execute things.
454 454
455 455 The AMI has an ``/hgdev`` bootstrapped with various Python installs
456 456 and a clone of the Mercurial repo.
457 457
458 458 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
459 459 Notably, blocks have to be copied on first access, which makes volume
460 460 I/O extremely slow on fresh volumes.
461 461
462 462 Furthermore, we may want to run operations, tests, etc on alternative
463 463 filesystems so we examine behavior on different filesystems.
464 464
465 465 This function is used to facilitate executing operations on alternate
466 466 volumes.
467 467 """
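# Usage sketch (hypothetical caller, not defined in this module):
#   prepare_exec_environment(instance.ssh_client, filesystem='xfs')
# uploads PREPARE_HGDEV to /hgdev/prepare-hgdev and runs it, which formats the
# scratch volume as XFS, mounts it at /hgwork, and rsyncs /hgdev/src there.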
468 468 sftp = ssh_client.open_sftp()
469 469
470 470 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
471 471 fh.write(PREPARE_HGDEV)
472 472 fh.chmod(0o0777)
473 473
474 474 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
475 475 chan, stdin, stdout = exec_command(ssh_client, command)
476 476 stdin.close()
477 477
478 478 for line in stdout:
479 479 print(line, end='')
480 480
481 481 res = chan.recv_exit_status()
482 482
483 483 if res:
484 484 raise Exception('non-0 exit code updating working directory; %d' % res)
485 485
486 486
487 487 def synchronize_hg(
488 488 source_path: pathlib.Path, ec2_instance, revision: str = None
489 489 ):
490 490 """Synchronize a local Mercurial source path to remote EC2 instance."""
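# Usage sketch (hypothetical values): synchronize_hg(pathlib.Path('/path/to/hg-repo'),
# instance, '.') resolves revision '.' locally, pushes it (plus revisions touching
# .hgtags, so automatic version calculation works) to ssh://<instance ip>//hgwork/src,
# and then runs HG_UPDATE_CLEAN on the instance to check out that revision.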
491 491
492 492 with tempfile.TemporaryDirectory() as temp_dir:
493 493 temp_dir = pathlib.Path(temp_dir)
494 494
495 495 ssh_dir = temp_dir / '.ssh'
496 496 ssh_dir.mkdir()
497 497 ssh_dir.chmod(0o0700)
498 498
499 499 public_ip = ec2_instance.public_ip_address
500 500
501 501 ssh_config = ssh_dir / 'config'
502 502
503 503 with ssh_config.open('w', encoding='utf-8') as fh:
504 504 fh.write('Host %s\n' % public_ip)
505 505 fh.write(' User hg\n')
506 506 fh.write(' StrictHostKeyChecking no\n')
507 507 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
508 508 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
509 509
510 510 if not (source_path / '.hg').is_dir():
511 511 raise Exception(
512 512 '%s is not a Mercurial repository; synchronization '
513 513 'not yet supported' % source_path
514 514 )
515 515
516 516 env = dict(os.environ)
517 517 env['HGPLAIN'] = '1'
518 518 env['HGENCODING'] = 'utf-8'
519 519
520 520 hg_bin = source_path / 'hg'
521 521
522 522 res = subprocess.run(
523 523 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
524 524 cwd=str(source_path),
525 525 env=env,
526 526 check=True,
527 527 capture_output=True,
528 528 )
529 529
530 530 full_revision = res.stdout.decode('ascii')
531 531
532 532 args = [
533 533 'python2.7',
534 534 str(hg_bin),
535 535 '--config',
536 536 'ui.ssh=ssh -F %s' % ssh_config,
537 537 '--config',
538 538 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
539 539 # Also ensure .hgtags changes are present so auto version
540 540 # calculation works.
541 541 'push',
542 542 '-f',
543 543 '-r',
544 544 full_revision,
545 545 '-r',
546 546 'file(.hgtags)',
547 547 'ssh://%s//hgwork/src' % public_ip,
548 548 ]
549 549
550 550 res = subprocess.run(args, cwd=str(source_path), env=env)
551 551
552 552 # Allow 1 (no-op) to not trigger error.
553 553 if res.returncode not in (0, 1):
554 554 res.check_returncode()
555 555
556 556 # TODO support synchronizing dirty working directory.
557 557
558 558 sftp = ec2_instance.ssh_client.open_sftp()
559 559
560 560 with sftp.open('/hgdev/hgup', 'wb') as fh:
561 561 fh.write(HG_UPDATE_CLEAN)
562 562 fh.chmod(0o0700)
563 563
564 564 chan, stdin, stdout = exec_command(
565 565 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
566 566 )
567 567 stdin.close()
568 568
569 569 for line in stdout:
570 570 print(line, end='')
571 571
572 572 res = chan.recv_exit_status()
573 573
574 574 if res:
575 575 raise Exception(
576 576 'non-0 exit code updating working directory; %d' % res
577 577 )
578 578
579 579
580 580 def run_tests(ssh_client, python_version, test_flags=None):
581 581 """Run tests on a remote Linux machine via an SSH client."""
582 582 test_flags = test_flags or []
583 583
584 584 print('running tests')
585 585
586 586 if python_version == 'system2':
587 587 python = '/usr/bin/python2'
588 588 elif python_version == 'system3':
589 589 python = '/usr/bin/python3'
590 590 elif python_version.startswith('pypy'):
591 591 python = '/hgdev/pyenv/shims/%s' % python_version
592 592 else:
593 593 python = '/hgdev/pyenv/shims/python%s' % python_version
594 594
595 595 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
596 596
597 597 command = (
598 598 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
599 599 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
600 600 )
601 601
602 602 chan, stdin, stdout = exec_command(ssh_client, command)
603 603
604 604 stdin.close()
605 605
606 606 for line in stdout:
607 607 print(line, end='')
608 608
609 609 return chan.recv_exit_status()
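# Usage sketch (hypothetical values): run_tests(instance.ssh_client, '3.9',
# ['--jobs', '2', 'test-revset.t']) selects /hgdev/pyenv/shims/python3.9 and runs
# "run-tests.py --jobs 2 test-revset.t" from /hgwork/src/tests, returning the
# harness exit status.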