automation: install python3-venv Debian package...
Gregory Szorc
r43731:acdd4f28 stable
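The package this commit adds, python3-venv, supplies the venv/ensurepip support that Debian and Ubuntu split out of the base python3 package; without it, `python3 -m venv` aborts with an error about ensurepip being unavailable. The snippet below is a hypothetical smoke test of that behaviour, not part of the change itself (the interpreter path and temporary directory are illustrative):

# Hypothetical check, not part of this commit: verify that the system python3
# can create a virtual environment once python3-venv is installed.
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as venv_dir:
    # On Debian/Ubuntu this step fails with an ensurepip error when
    # python3-venv is missing.
    subprocess.run(['/usr/bin/python3', '-m', 'venv', venv_dir], check=True)
    subprocess.run([venv_dir + '/bin/pip', '--version'], check=True)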
@@ -1,594 +1,595 @@
1 1 # linux.py - Linux specific automation functionality
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import os
11 11 import pathlib
12 12 import shlex
13 13 import subprocess
14 14 import tempfile
15 15
16 16 from .ssh import exec_command
17 17
18 18
19 19 # Linux distributions that are supported.
20 20 DISTROS = {
21 21 'debian9',
22 22 'debian10',
23 23 'ubuntu18.04',
24 24 'ubuntu19.04',
25 25 }
26 26
27 27 INSTALL_PYTHONS = r'''
28 28 PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
29 29 PYENV3_VERSIONS="3.5.7 3.6.9 3.7.5 3.8.0 pypy3.5-7.0.0 pypy3.6-7.2.0"
30 30
31 31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
32 32 pushd /hgdev/pyenv
33 33 git checkout 0e7cfc3b3d4eca46ad83d632e1505f5932cd179b
34 34 popd
35 35
36 36 export PYENV_ROOT="/hgdev/pyenv"
37 37 export PATH="$PYENV_ROOT/bin:$PATH"
38 38
39 39 # pip 19.2.3.
40 40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
41 41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
42 42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
43 43
44 44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
45 45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
46 46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
47 47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
48 48
49 49 for v in ${PYENV2_VERSIONS}; do
50 50 pyenv install -v ${v}
51 51 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
52 52 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
53 53 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
54 54 done
55 55
56 56 for v in ${PYENV3_VERSIONS}; do
57 57 pyenv install -v ${v}
58 58 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
59 59 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
60 60 done
61 61
62 62 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
63 63 '''.lstrip().replace(
64 64 '\r\n', '\n'
65 65 )
66 66
67 67
68 68 INSTALL_RUST = r'''
69 69 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
70 70 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
71 71 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
72 72
73 73 chmod +x rustup-init
74 74 sudo -H -u hg -g hg ./rustup-init -y
75 75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
76 76 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
77 77 '''
78 78
79 79
80 80 BOOTSTRAP_VIRTUALENV = r'''
81 81 /usr/bin/virtualenv /hgdev/venv-bootstrap
82 82
83 83 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
84 84 HG_TARBALL=mercurial-5.1.1.tar.gz
85 85
86 86 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
87 87 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
88 88
89 89 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
90 90 '''.lstrip().replace(
91 91 '\r\n', '\n'
92 92 )
93 93
94 94
95 95 BOOTSTRAP_DEBIAN = (
96 96 r'''
97 97 #!/bin/bash
98 98
99 99 set -ex
100 100
101 101 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
102 102 DEBIAN_VERSION=`cat /etc/debian_version`
103 103 LSB_RELEASE=`lsb_release -cs`
104 104
105 105 sudo /usr/sbin/groupadd hg
106 106 sudo /usr/sbin/groupadd docker
107 107 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
108 108 sudo mkdir /home/hg/.ssh
109 109 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
110 110 sudo chown -R hg:hg /home/hg/.ssh
111 111 sudo chmod 700 /home/hg/.ssh
112 112 sudo chmod 600 /home/hg/.ssh/authorized_keys
113 113
114 114 cat << EOF | sudo tee /etc/sudoers.d/90-hg
115 115 hg ALL=(ALL) NOPASSWD:ALL
116 116 EOF
117 117
118 118 sudo apt-get update
119 119 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
120 120
121 121 # Install packages necessary to set up Docker Apt repo.
122 122 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
123 123 apt-transport-https \
124 124 gnupg
125 125
126 126 cat > docker-apt-key << EOF
127 127 -----BEGIN PGP PUBLIC KEY BLOCK-----
128 128
129 129 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
130 130 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
131 131 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
132 132 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
133 133 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
134 134 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
135 135 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
136 136 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
137 137 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
138 138 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
139 139 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
140 140 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
141 141 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
142 142 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
143 143 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
144 144 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
145 145 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
146 146 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
147 147 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
148 148 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
149 149 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
150 150 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
151 151 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
152 152 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
153 153 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
154 154 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
155 155 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
156 156 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
157 157 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
158 158 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
159 159 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
160 160 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
161 161 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
162 162 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
163 163 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
164 164 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
165 165 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
166 166 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
167 167 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
168 168 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
169 169 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
170 170 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
171 171 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
172 172 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
173 173 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
174 174 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
175 175 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
176 176 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
177 177 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
178 178 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
179 179 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
180 180 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
181 181 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
182 182 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
183 183 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
184 184 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
185 185 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
186 186 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
187 187 =0YYh
188 188 -----END PGP PUBLIC KEY BLOCK-----
189 189 EOF
190 190
191 191 sudo apt-key add docker-apt-key
192 192
193 193 if [ "$LSB_RELEASE" = "stretch" ]; then
194 194 cat << EOF | sudo tee -a /etc/apt/sources.list
195 195 # Need backports for clang-format-6.0
196 196 deb http://deb.debian.org/debian stretch-backports main
197 197 EOF
198 198 fi
199 199
200 200 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
201 201 cat << EOF | sudo tee -a /etc/apt/sources.list
202 202 # Sources are useful if we want to compile things locally.
203 203 deb-src http://deb.debian.org/debian $LSB_RELEASE main
204 204 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
205 205 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
206 206 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
207 207
208 208 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
209 209 EOF
210 210
211 211 elif [ "$DISTRO" = "Ubuntu" ]; then
212 212 cat << EOF | sudo tee -a /etc/apt/sources.list
213 213 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
214 214 EOF
215 215
216 216 fi
217 217
218 218 sudo apt-get update
219 219
220 220 PACKAGES="\
221 221 awscli \
222 222 btrfs-progs \
223 223 build-essential \
224 224 bzr \
225 225 clang-format-6.0 \
226 226 cvs \
227 227 darcs \
228 228 debhelper \
229 229 devscripts \
230 230 docker-ce \
231 231 dpkg-dev \
232 232 dstat \
233 233 emacs \
234 234 gettext \
235 235 git \
236 236 htop \
237 237 iotop \
238 238 jfsutils \
239 239 libbz2-dev \
240 240 libexpat1-dev \
241 241 libffi-dev \
242 242 libgdbm-dev \
243 243 liblzma-dev \
244 244 libncurses5-dev \
245 245 libnss3-dev \
246 246 libreadline-dev \
247 247 libsqlite3-dev \
248 248 libssl-dev \
249 249 netbase \
250 250 ntfs-3g \
251 251 nvme-cli \
252 252 pyflakes \
253 253 pyflakes3 \
254 254 pylint \
255 255 pylint3 \
256 256 python-all-dev \
257 257 python-dev \
258 258 python-docutils \
259 259 python-fuzzywuzzy \
260 260 python-pygments \
261 261 python-subversion \
262 262 python-vcr \
263 263 python3-boto3 \
264 264 python3-dev \
265 265 python3-docutils \
266 266 python3-fuzzywuzzy \
267 267 python3-pygments \
268 268 python3-vcr \
269 python3-venv \
269 270 rsync \
270 271 sqlite3 \
271 272 subversion \
272 273 tcl-dev \
273 274 tk-dev \
274 275 tla \
275 276 unzip \
276 277 uuid-dev \
277 278 vim \
278 279 virtualenv \
279 280 wget \
280 281 xfsprogs \
281 282 zip \
282 283 zlib1g-dev"
283 284
284 285 if [ "LSB_RELEASE" = "stretch" ]; then
285 286 PACKAGES="$PACKAGES linux-perf"
286 287 elif [ "$DISTRO" = "Ubuntu" ]; then
287 288 PACKAGES="$PACKAGES linux-tools-common"
288 289 fi
289 290
290 291 # Monotone only available in older releases.
291 292 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
292 293 PACKAGES="$PACKAGES monotone"
293 294 fi
294 295
295 296 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
296 297
297 298 # Create clang-format symlink so test harness finds it.
298 299 sudo update-alternatives --install /usr/bin/clang-format clang-format \
299 300 /usr/bin/clang-format-6.0 1000
300 301
301 302 sudo mkdir /hgdev
302 303 # Will be normalized to hg:hg later.
303 304 sudo chown `whoami` /hgdev
304 305
305 306 {install_rust}
306 307
307 308 cp requirements-py2.txt /hgdev/requirements-py2.txt
308 309 cp requirements-py3.txt /hgdev/requirements-py3.txt
309 310
310 311 # Disable the pip version check because it uses the network and can
311 312 # be annoying.
312 313 cat << EOF | sudo tee -a /etc/pip.conf
313 314 [global]
314 315 disable-pip-version-check = True
315 316 EOF
316 317
317 318 {install_pythons}
318 319 {bootstrap_virtualenv}
319 320
320 321 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
321 322
322 323 # Mark the repo as non-publishing.
323 324 cat >> /hgdev/src/.hg/hgrc << EOF
324 325 [phases]
325 326 publish = false
326 327 EOF
327 328
328 329 sudo chown -R hg:hg /hgdev
329 330 '''.lstrip()
330 331 .format(
331 332 install_rust=INSTALL_RUST,
332 333 install_pythons=INSTALL_PYTHONS,
333 334 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
334 335 )
335 336 .replace('\r\n', '\n')
336 337 )
337 338
338 339
339 340 # Prepares /hgdev for operations.
340 341 PREPARE_HGDEV = '''
341 342 #!/bin/bash
342 343
343 344 set -e
344 345
345 346 FS=$1
346 347
347 348 ensure_device() {
348 349 if [ -z "${DEVICE}" ]; then
349 350 echo "could not find block device to format"
350 351 exit 1
351 352 fi
352 353 }
353 354
354 355 # Determine device to partition for extra filesystem.
355 356 # If only 1 volume is present, it will be the root volume and
356 357 # should be /dev/nvme0. If multiple volumes are present, the
357 358 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
358 359 # a partition.
359 360 if [ -e /dev/nvme1n1 ]; then
360 361 if [ -e /dev/nvme0n1p1 ]; then
361 362 DEVICE=/dev/nvme1n1
362 363 else
363 364 DEVICE=/dev/nvme0n1
364 365 fi
365 366 else
366 367 DEVICE=
367 368 fi
368 369
369 370 sudo mkdir /hgwork
370 371
371 372 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
372 373 ensure_device
373 374 echo "creating ${FS} filesystem on ${DEVICE}"
374 375 fi
375 376
376 377 if [ "${FS}" = "default" ]; then
377 378 :
378 379
379 380 elif [ "${FS}" = "btrfs" ]; then
380 381 sudo mkfs.btrfs ${DEVICE}
381 382 sudo mount ${DEVICE} /hgwork
382 383
383 384 elif [ "${FS}" = "ext3" ]; then
384 385 # lazy_journal_init speeds up filesystem creation at the expense of
385 386 # integrity if things crash. We are an ephemeral instance, so we don't
386 387 # care about integrity.
387 388 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
388 389 sudo mount ${DEVICE} /hgwork
389 390
390 391 elif [ "${FS}" = "ext4" ]; then
391 392 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
392 393 sudo mount ${DEVICE} /hgwork
393 394
394 395 elif [ "${FS}" = "jfs" ]; then
395 396 sudo mkfs.jfs ${DEVICE}
396 397 sudo mount ${DEVICE} /hgwork
397 398
398 399 elif [ "${FS}" = "tmpfs" ]; then
399 400 echo "creating tmpfs volume in /hgwork"
400 401 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
401 402
402 403 elif [ "${FS}" = "xfs" ]; then
403 404 sudo mkfs.xfs ${DEVICE}
404 405 sudo mount ${DEVICE} /hgwork
405 406
406 407 else
407 408 echo "unsupported filesystem: ${FS}"
408 409 exit 1
409 410 fi
410 411
411 412 echo "/hgwork ready"
412 413
413 414 sudo chown hg:hg /hgwork
414 415 mkdir /hgwork/tmp
415 416 chown hg:hg /hgwork/tmp
416 417
417 418 rsync -a /hgdev/src /hgwork/
418 419 '''.lstrip().replace(
419 420 '\r\n', '\n'
420 421 )
421 422
422 423
423 424 HG_UPDATE_CLEAN = '''
424 425 set -ex
425 426
426 427 HG=/hgdev/venv-bootstrap/bin/hg
427 428
428 429 cd /hgwork/src
429 430 ${HG} --config extensions.purge= purge --all
430 431 ${HG} update -C $1
431 432 ${HG} log -r .
432 433 '''.lstrip().replace(
433 434 '\r\n', '\n'
434 435 )
435 436
436 437
437 438 def prepare_exec_environment(ssh_client, filesystem='default'):
438 439 """Prepare an EC2 instance to execute things.
439 440
440 441 The AMI has an ``/hgdev`` bootstrapped with various Python installs
441 442 and a clone of the Mercurial repo.
442 443
443 444 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
444 445 Notably, blocks have to be copied on first access, which makes volume
445 446 I/O extremely slow on fresh volumes.
446 447
447 448 Furthermore, we may want to run operations, tests, etc. on alternative
448 449 filesystems so we can examine behavior on different filesystems.
449 450
450 451 This function is used to facilitate executing operations on alternate
451 452 volumes.
452 453 """
453 454 sftp = ssh_client.open_sftp()
454 455
455 456 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
456 457 fh.write(PREPARE_HGDEV)
457 458 fh.chmod(0o0777)
458 459
459 460 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
460 461 chan, stdin, stdout = exec_command(ssh_client, command)
461 462 stdin.close()
462 463
463 464 for line in stdout:
464 465 print(line, end='')
465 466
466 467 res = chan.recv_exit_status()
467 468
468 469 if res:
469 470 raise Exception('non-0 exit code preparing exec environment; %d' % res)
470 471
471 472
472 473 def synchronize_hg(
473 474 source_path: pathlib.Path, ec2_instance, revision: str = None
474 475 ):
475 476 """Synchronize a local Mercurial source path to remote EC2 instance."""
476 477
477 478 with tempfile.TemporaryDirectory() as temp_dir:
478 479 temp_dir = pathlib.Path(temp_dir)
479 480
480 481 ssh_dir = temp_dir / '.ssh'
481 482 ssh_dir.mkdir()
482 483 ssh_dir.chmod(0o0700)
483 484
484 485 public_ip = ec2_instance.public_ip_address
485 486
486 487 ssh_config = ssh_dir / 'config'
487 488
488 489 with ssh_config.open('w', encoding='utf-8') as fh:
489 490 fh.write('Host %s\n' % public_ip)
490 491 fh.write(' User hg\n')
491 492 fh.write(' StrictHostKeyChecking no\n')
492 493 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
493 494 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
494 495
495 496 if not (source_path / '.hg').is_dir():
496 497 raise Exception(
497 498 '%s is not a Mercurial repository; synchronization '
498 499 'not yet supported' % source_path
499 500 )
500 501
501 502 env = dict(os.environ)
502 503 env['HGPLAIN'] = '1'
503 504 env['HGENCODING'] = 'utf-8'
504 505
505 506 hg_bin = source_path / 'hg'
506 507
507 508 res = subprocess.run(
508 509 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
509 510 cwd=str(source_path),
510 511 env=env,
511 512 check=True,
512 513 capture_output=True,
513 514 )
514 515
515 516 full_revision = res.stdout.decode('ascii')
516 517
517 518 args = [
518 519 'python2.7',
519 520 str(hg_bin),
520 521 '--config',
521 522 'ui.ssh=ssh -F %s' % ssh_config,
522 523 '--config',
523 524 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
524 525 # Also ensure .hgtags changes are present so auto version
525 526 # calculation works.
526 527 'push',
527 528 '-f',
528 529 '-r',
529 530 full_revision,
530 531 '-r',
531 532 'file(.hgtags)',
532 533 'ssh://%s//hgwork/src' % public_ip,
533 534 ]
534 535
535 536 res = subprocess.run(args, cwd=str(source_path), env=env)
536 537
537 538 # Allow 1 (no-op) to not trigger error.
538 539 if res.returncode not in (0, 1):
539 540 res.check_returncode()
540 541
541 542 # TODO support synchronizing dirty working directory.
542 543
543 544 sftp = ec2_instance.ssh_client.open_sftp()
544 545
545 546 with sftp.open('/hgdev/hgup', 'wb') as fh:
546 547 fh.write(HG_UPDATE_CLEAN)
547 548 fh.chmod(0o0700)
548 549
549 550 chan, stdin, stdout = exec_command(
550 551 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
551 552 )
552 553 stdin.close()
553 554
554 555 for line in stdout:
555 556 print(line, end='')
556 557
557 558 res = chan.recv_exit_status()
558 559
559 560 if res:
560 561 raise Exception(
561 562 'non-0 exit code updating working directory; %d' % res
562 563 )
563 564
564 565
565 566 def run_tests(ssh_client, python_version, test_flags=None):
566 567 """Run tests on a remote Linux machine via an SSH client."""
567 568 test_flags = test_flags or []
568 569
569 570 print('running tests')
570 571
571 572 if python_version == 'system2':
572 573 python = '/usr/bin/python2'
573 574 elif python_version == 'system3':
574 575 python = '/usr/bin/python3'
575 576 elif python_version.startswith('pypy'):
576 577 python = '/hgdev/pyenv/shims/%s' % python_version
577 578 else:
578 579 python = '/hgdev/pyenv/shims/python%s' % python_version
579 580
580 581 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
581 582
582 583 command = (
583 584 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
584 585 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
585 586 )
586 587
587 588 chan, stdin, stdout = exec_command(ssh_client, command)
588 589
589 590 stdin.close()
590 591
591 592 for line in stdout:
592 593 print(line, end='')
593 594
594 595 return chan.recv_exit_status()
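
For reference, a minimal sketch of how the three public helpers in this module chain together when driving a test run. The ec2_instance object and its ssh_client, public_ip_address, and ssh_private_key_path attributes are assumed to be supplied by the companion AWS provisioning code; the driver function itself is hypothetical.

# Hypothetical driver built on the functions shown in this diff, assuming an
# ec2_instance provisioned elsewhere in the automation package exposes
# .ssh_client, .public_ip_address and .ssh_private_key_path (the attributes
# synchronize_hg() reads).
import pathlib

from hgautomation import linux  # module shown in this diff


def run_linux_tests(ec2_instance, repo: pathlib.Path, revision: str):
    # Format /hgwork on an ephemeral volume and copy /hgdev/src into it.
    linux.prepare_exec_environment(ec2_instance.ssh_client, filesystem='ext4')
    # Push the requested revision to the instance and update to it.
    linux.synchronize_hg(repo, ec2_instance, revision)
    # Run the test harness with the system Python 3 and return its exit code.
    return linux.run_tests(ec2_instance.ssh_client, 'system3',
                           test_flags=['-j', '2'])

prepare_exec_environment runs first so that /hgwork/src exists before synchronize_hg pushes to ssh://<instance>//hgwork/src and run_tests executes run-tests.py from /hgwork/src/tests.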