##// END OF EJS Templates
automation: install Rust in Linux environment...
Gregory Szorc -
r42924:89ba8177 default
parent child Browse files
Show More
@@ -1,551 +1,566
1 1 # linux.py - Linux specific automation functionality
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import os
11 11 import pathlib
12 12 import shlex
13 13 import subprocess
14 14 import tempfile
15 15
16 16 from .ssh import (
17 17 exec_command,
18 18 )
19 19
20 20
# Names of the Linux distributions we know how to bootstrap and run on.
DISTROS = {'debian9', 'ubuntu18.04', 'ubuntu18.10', 'ubuntu19.04'}
28 28
# Shell script fragment that installs pinned CPython and PyPy versions via a
# pinned pyenv checkout, then seeds each interpreter with pip, virtualenv
# (py2 only), and the project's requirements files. Interpolated into
# BOOTSTRAP_DEBIAN below; downloads are integrity-checked via sha256sum.
INSTALL_PYTHONS = r'''
PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"

git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
pushd /hgdev/pyenv
git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
popd

export PYENV_ROOT="/hgdev/pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"

# pip 19.0.3.
PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
echo "${PIP_SHA256} get-pip.py" | sha256sum --check -

VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -

for v in ${PYENV2_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
done

for v in ${PYENV3_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
done

pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
'''.lstrip().replace('\r\n', '\n')
66 66
67 67
# Shell script fragment that installs rustup (pinned rustup-init 1.18.3,
# integrity-checked) and Rust toolchains 1.31.1 and 1.34.2 plus clippy for
# the `hg` user. Normalized with .lstrip()/.replace() like the sibling
# script constants so no stray leading newline or CRLFs are interpolated
# into BOOTSTRAP_DEBIAN.
INSTALL_RUST = r'''
RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -

chmod +x rustup-init
sudo -H -u hg -g hg ./rustup-init -y
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
'''.lstrip().replace('\r\n', '\n')
78
79
# Shell script fragment that creates the bootstrap virtualenv at
# /hgdev/venv-bootstrap and installs a pinned, integrity-checked Mercurial
# release into it. This hg is what later clones/updates /hgdev/src.
BOOTSTRAP_VIRTUALENV = r'''
/usr/bin/virtualenv /hgdev/venv-bootstrap

HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
HG_TARBALL=mercurial-4.9.1.tar.gz

wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -

/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
'''.lstrip().replace('\r\n', '\n')
79 91
80 92
# Master bootstrap script for Debian/Ubuntu instances. Creates the `hg`
# user, registers the Docker apt repo (key inlined below), installs the
# large package set needed for building/testing Mercurial, then expands the
# INSTALL_RUST / INSTALL_PYTHONS / BOOTSTRAP_VIRTUALENV fragments via
# .format() — which is why literal awk braces are doubled ({{...}}).
BOOTSTRAP_DEBIAN = r'''
#!/bin/bash

set -ex

DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
DEBIAN_VERSION=`cat /etc/debian_version`
LSB_RELEASE=`lsb_release -cs`

sudo /usr/sbin/groupadd hg
sudo /usr/sbin/groupadd docker
sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
sudo mkdir /home/hg/.ssh
sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
sudo chown -R hg:hg /home/hg/.ssh
sudo chmod 700 /home/hg/.ssh
sudo chmod 600 /home/hg/.ssh/authorized_keys

cat << EOF | sudo tee /etc/sudoers.d/90-hg
hg ALL=(ALL) NOPASSWD:ALL
EOF

sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade

# Install packages necessary to set up Docker Apt repo.
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
    apt-transport-https \
    gnupg

cat > docker-apt-key << EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
=0YYh
-----END PGP PUBLIC KEY BLOCK-----
EOF

sudo apt-key add docker-apt-key

if [ "$DEBIAN_VERSION" = "9.8" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
# Need backports for clang-format-6.0
deb http://deb.debian.org/debian stretch-backports main

# Sources are useful if we want to compile things locally.
deb-src http://deb.debian.org/debian stretch main
deb-src http://security.debian.org/debian-security stretch/updates main
deb-src http://deb.debian.org/debian stretch-updates main
deb-src http://deb.debian.org/debian stretch-backports main

deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
EOF

elif [ "$DISTRO" = "Ubuntu" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
EOF

fi

sudo apt-get update

PACKAGES="\
    btrfs-progs \
    build-essential \
    bzr \
    clang-format-6.0 \
    cvs \
    darcs \
    debhelper \
    devscripts \
    dpkg-dev \
    dstat \
    emacs \
    gettext \
    git \
    htop \
    iotop \
    jfsutils \
    libbz2-dev \
    libexpat1-dev \
    libffi-dev \
    libgdbm-dev \
    liblzma-dev \
    libncurses5-dev \
    libnss3-dev \
    libreadline-dev \
    libsqlite3-dev \
    libssl-dev \
    netbase \
    ntfs-3g \
    nvme-cli \
    pyflakes \
    pyflakes3 \
    pylint \
    pylint3 \
    python-all-dev \
    python-dev \
    python-docutils \
    python-fuzzywuzzy \
    python-pygments \
    python-subversion \
    python-vcr \
    python3-dev \
    python3-docutils \
    python3-fuzzywuzzy \
    python3-pygments \
    python3-vcr \
    rsync \
    sqlite3 \
    subversion \
    tcl-dev \
    tk-dev \
    tla \
    unzip \
    uuid-dev \
    vim \
    virtualenv \
    wget \
    xfsprogs \
    zip \
    zlib1g-dev"

if [ "$DEBIAN_VERSION" = "9.8" ]; then
    PACKAGES="$PACKAGES linux-perf"
elif [ "$DISTRO" = "Ubuntu" ]; then
    PACKAGES="$PACKAGES linux-tools-common"
fi

# Ubuntu 19.04 removes monotone.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES monotone"
fi

# As of April 27, 2019, Docker hasn't published packages for
# Ubuntu 19.04 yet.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES docker-ce"
fi

sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES

# Create clang-format symlink so test harness finds it.
sudo update-alternatives --install /usr/bin/clang-format clang-format \
    /usr/bin/clang-format-6.0 1000

sudo mkdir /hgdev
# Will be normalized to hg:hg later.
sudo chown `whoami` /hgdev

{install_rust}

cp requirements-py2.txt /hgdev/requirements-py2.txt
cp requirements-py3.txt /hgdev/requirements-py3.txt

# Disable the pip version check because it uses the network and can
# be annoying.
cat << EOF | sudo tee -a /etc/pip.conf
[global]
disable-pip-version-check = True
EOF

{install_pythons}
{bootstrap_virtualenv}

/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src

# Mark the repo as non-publishing.
cat >> /hgdev/src/.hg/hgrc << EOF
[phases]
publish = false
EOF

sudo chown -R hg:hg /hgdev
'''.lstrip().format(
    install_rust=INSTALL_RUST,
    install_pythons=INSTALL_PYTHONS,
    bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
).replace('\r\n', '\n')
315 330
316 331
# Prepares /hgdev for operations.
# Shell script uploaded and run by prepare_exec_environment(). Takes the
# desired filesystem name as $1, finds the spare NVMe block device (if any),
# creates/mounts the requested filesystem at /hgwork, and seeds it with a
# copy of /hgdev/src.
PREPARE_HGDEV = '''
#!/bin/bash

set -e

FS=$1

ensure_device() {
    if [ -z "${DEVICE}" ]; then
        echo "could not find block device to format"
        exit 1
    fi
}

# Determine device to partition for extra filesystem.
# If only 1 volume is present, it will be the root volume and
# should be /dev/nvme0. If multiple volumes are present, the
# root volume could be nvme0 or nvme1. Use whichever one doesn't have
# a partition.
if [ -e /dev/nvme1n1 ]; then
    if [ -e /dev/nvme0n1p1 ]; then
        DEVICE=/dev/nvme1n1
    else
        DEVICE=/dev/nvme0n1
    fi
else
    DEVICE=
fi

sudo mkdir /hgwork

if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
    ensure_device
    echo "creating ${FS} filesystem on ${DEVICE}"
fi

if [ "${FS}" = "default" ]; then
    :

elif [ "${FS}" = "btrfs" ]; then
    sudo mkfs.btrfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext3" ]; then
    # lazy_journal_init speeds up filesystem creation at the expense of
    # integrity if things crash. We are an ephemeral instance, so we don't
    # care about integrity.
    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext4" ]; then
    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "jfs" ]; then
    sudo mkfs.jfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "tmpfs" ]; then
    echo "creating tmpfs volume in /hgwork"
    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork

elif [ "${FS}" = "xfs" ]; then
    sudo mkfs.xfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

else
    echo "unsupported filesystem: ${FS}"
    exit 1
fi

echo "/hgwork ready"

sudo chown hg:hg /hgwork
mkdir /hgwork/tmp
chown hg:hg /hgwork/tmp

rsync -a /hgdev/src /hgwork/
'''.lstrip().replace('\r\n', '\n')
397 412
398 413
# Shell script uploaded as /hgdev/hgup by synchronize_hg(). Purges
# untracked files from /hgwork/src and updates its working directory
# cleanly to the revision passed as $1, using the bootstrap hg.
HG_UPDATE_CLEAN = '''
set -ex

HG=/hgdev/venv-bootstrap/bin/hg

cd /hgwork/src
${HG} --config extensions.purge= purge --all
${HG} update -C $1
${HG} log -r .
'''.lstrip().replace('\r\n', '\n')
409 424
410 425
def prepare_exec_environment(ssh_client, filesystem='default'):
    """Prepare an EC2 instance to execute things.

    The AMI has an ``/hgdev`` bootstrapped with various Python installs
    and a clone of the Mercurial repo.

    In EC2, EBS volumes launched from snapshots have wonky performance behavior.
    Notably, blocks have to be copied on first access, which makes volume
    I/O extremely slow on fresh volumes.

    Furthermore, we may want to run operations, tests, etc on alternative
    filesystems so we examine behavior on different filesystems.

    This function is used to facilitate executing operations on alternate
    volumes.

    Raises ``Exception`` if the remote preparation script exits non-zero.
    """
    sftp = ssh_client.open_sftp()

    # Upload the preparation script and make it executable.
    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
        fh.write(PREPARE_HGDEV)
        fh.chmod(0o0777)

    command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
    chan, stdin, stdout = exec_command(ssh_client, command)
    stdin.close()

    # Stream remote output as it is produced.
    for line in stdout:
        print(line, end='')

    res = chan.recv_exit_status()

    if res:
        # The previous message claimed "updating working directory" --
        # copy-pasted from synchronize_hg() and misleading for this step.
        raise Exception('non-0 exit code preparing execution environment: %d'
                        % res)
445 460
446 461
def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
    """Synchronize a local Mercurial source path to remote EC2 instance.

    Pushes the changeset identified by ``revision`` (defaulting to the
    working directory parent, ``.``) from the clone at ``source_path`` to
    the instance's ``/hgwork/src`` repository over SSH, then updates the
    remote working directory to that revision.

    Raises ``Exception`` if ``source_path`` is not a Mercurial repository
    or if the remote update exits non-zero.
    """

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = pathlib.Path(temp_dir)

        # Build a throwaway SSH config so `hg push` can reach the instance
        # without touching the invoking user's real SSH state.
        ssh_dir = temp_dir / '.ssh'
        ssh_dir.mkdir()
        ssh_dir.chmod(0o0700)

        public_ip = ec2_instance.public_ip_address

        ssh_config = ssh_dir / 'config'

        with ssh_config.open('w', encoding='utf-8') as fh:
            fh.write('Host %s\n' % public_ip)
            fh.write('  User hg\n')
            fh.write('  StrictHostKeyChecking no\n')
            fh.write('  UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
            fh.write('  IdentityFile %s\n' % ec2_instance.ssh_private_key_path)

        if not (source_path / '.hg').is_dir():
            raise Exception('%s is not a Mercurial repository; synchronization '
                            'not yet supported' % source_path)

        env = dict(os.environ)
        env['HGPLAIN'] = '1'
        env['HGENCODING'] = 'utf-8'

        hg_bin = source_path / 'hg'

        # Resolve the symbolic revision to a full node hash. Default to `.`
        # when no revision was given: previously a None default was passed
        # straight into the argv list, which raises TypeError in subprocess.
        res = subprocess.run(
            ['python2.7', str(hg_bin), 'log', '-r', revision or '.',
             '-T', '{node}'],
            cwd=str(source_path), env=env, check=True, capture_output=True)

        full_revision = res.stdout.decode('ascii')

        args = [
            'python2.7', str(hg_bin),
            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
            # Also ensure .hgtags changes are present so auto version
            # calculation works.
            'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
            'ssh://%s//hgwork/src' % public_ip,
        ]

        res = subprocess.run(args, cwd=str(source_path), env=env)

        # Allow 1 (no-op) to not trigger error.
        if res.returncode not in (0, 1):
            res.check_returncode()

        # TODO support synchronizing dirty working directory.

        sftp = ec2_instance.ssh_client.open_sftp()

        # Upload the clean-update script and run it for the pushed revision.
        with sftp.open('/hgdev/hgup', 'wb') as fh:
            fh.write(HG_UPDATE_CLEAN)
            fh.chmod(0o0700)

        chan, stdin, stdout = exec_command(
            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
        stdin.close()

        for line in stdout:
            print(line, end='')

        res = chan.recv_exit_status()

        if res:
            raise Exception('non-0 exit code updating working directory; %d'
                            % res)
520 535
521 536
def run_tests(ssh_client, python_version, test_flags=None):
    """Run tests on a remote Linux machine via an SSH client."""
    flags = test_flags or []

    print('running tests')

    # Resolve the requested interpreter to an absolute path on the remote
    # host: the two system interpreters are fixed paths, everything else
    # lives under the pyenv shims directory.
    system_interpreters = {
        'system2': '/usr/bin/python2',
        'system3': '/usr/bin/python3',
    }
    if python_version in system_interpreters:
        python = system_interpreters[python_version]
    elif python_version.startswith('pypy'):
        python = '/hgdev/pyenv/shims/%s' % python_version
    else:
        python = '/hgdev/pyenv/shims/python%s' % python_version

    quoted_flags = ' '.join(shlex.quote(arg) for arg in flags)

    command = (
        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
            python, quoted_flags))

    chan, stdin, stdout = exec_command(ssh_client, command)
    stdin.close()

    # Relay remote test output to our stdout as it arrives.
    for line in stdout:
        print(line, end='')

    return chan.recv_exit_status()
General Comments 0
You need to be logged in to leave comments. Login now