automation: install latest Python versions...
Gregory Szorc
r42922:6f7262fe default
@@ -1,551 +1,551 @@
1 1 # linux.py - Linux specific automation functionality
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import os
11 11 import pathlib
12 12 import shlex
13 13 import subprocess
14 14 import tempfile
15 15
16 16 from .ssh import (
17 17 exec_command,
18 18 )
19 19
20 20
21 21 # Linux distributions that are supported.
22 22 DISTROS = {
23 23 'debian9',
24 24 'ubuntu18.04',
25 25 'ubuntu18.10',
26 26 'ubuntu19.04',
27 27 }
28 28
29 29 INSTALL_PYTHONS = r'''
30 30 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
31 - PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
31 + PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
32 32
33 33 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
34 34 pushd /hgdev/pyenv
35 - git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
35 + git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
36 36 popd
37 37
38 38 export PYENV_ROOT="/hgdev/pyenv"
39 39 export PATH="$PYENV_ROOT/bin:$PATH"
40 40
41 41 # pip 19.0.3.
42 42 PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
43 43 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
44 44 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
45 45
46 46 VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
47 47 VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
48 48 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
49 49 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
50 50
51 51 for v in ${PYENV2_VERSIONS}; do
52 52 pyenv install -v ${v}
53 53 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
54 54 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
55 55 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
56 56 done
57 57
58 58 for v in ${PYENV3_VERSIONS}; do
59 59 pyenv install -v ${v}
60 60 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
61 61 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
62 62 done
63 63
64 64 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
65 65 '''.lstrip().replace('\r\n', '\n')
66 66
67 67
68 68 BOOTSTRAP_VIRTUALENV = r'''
69 69 /usr/bin/virtualenv /hgdev/venv-bootstrap
70 70
71 71 HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
72 72 HG_TARBALL=mercurial-4.9.1.tar.gz
73 73
74 74 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
75 75 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
76 76
77 77 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
78 78 '''.lstrip().replace('\r\n', '\n')
79 79
80 80
81 81 BOOTSTRAP_DEBIAN = r'''
82 82 #!/bin/bash
83 83
84 84 set -ex
85 85
86 86 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
87 87 DEBIAN_VERSION=`cat /etc/debian_version`
88 88 LSB_RELEASE=`lsb_release -cs`
89 89
90 90 sudo /usr/sbin/groupadd hg
91 91 sudo /usr/sbin/groupadd docker
92 92 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
93 93 sudo mkdir /home/hg/.ssh
94 94 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
95 95 sudo chown -R hg:hg /home/hg/.ssh
96 96 sudo chmod 700 /home/hg/.ssh
97 97 sudo chmod 600 /home/hg/.ssh/authorized_keys
98 98
99 99 cat << EOF | sudo tee /etc/sudoers.d/90-hg
100 100 hg ALL=(ALL) NOPASSWD:ALL
101 101 EOF
102 102
103 103 sudo apt-get update
104 104 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
105 105
106 106 # Install packages necessary to set up Docker Apt repo.
107 107 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
108 108 apt-transport-https \
109 109 gnupg
110 110
111 111 cat > docker-apt-key << EOF
112 112 -----BEGIN PGP PUBLIC KEY BLOCK-----
113 113
114 114 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
115 115 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
116 116 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
117 117 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
118 118 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
119 119 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
120 120 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
121 121 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
122 122 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
123 123 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
124 124 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
125 125 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
126 126 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
127 127 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
128 128 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
129 129 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
130 130 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
131 131 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
132 132 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
133 133 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
134 134 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
135 135 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
136 136 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
137 137 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
138 138 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
139 139 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
140 140 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
141 141 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
142 142 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
143 143 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
144 144 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
145 145 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
146 146 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
147 147 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
148 148 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
149 149 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
150 150 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
151 151 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
152 152 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
153 153 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
154 154 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
155 155 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
156 156 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
157 157 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
158 158 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
159 159 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
160 160 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
161 161 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
162 162 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
163 163 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
164 164 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
165 165 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
166 166 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
167 167 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
168 168 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
169 169 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
170 170 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
171 171 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
172 172 =0YYh
173 173 -----END PGP PUBLIC KEY BLOCK-----
174 174 EOF
175 175
176 176 sudo apt-key add docker-apt-key
177 177
178 178 if [ "$DEBIAN_VERSION" = "9.8" ]; then
179 179 cat << EOF | sudo tee -a /etc/apt/sources.list
180 180 # Need backports for clang-format-6.0
181 181 deb http://deb.debian.org/debian stretch-backports main
182 182
183 183 # Sources are useful if we want to compile things locally.
184 184 deb-src http://deb.debian.org/debian stretch main
185 185 deb-src http://security.debian.org/debian-security stretch/updates main
186 186 deb-src http://deb.debian.org/debian stretch-updates main
187 187 deb-src http://deb.debian.org/debian stretch-backports main
188 188
189 189 deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
190 190 EOF
191 191
192 192 elif [ "$DISTRO" = "Ubuntu" ]; then
193 193 cat << EOF | sudo tee -a /etc/apt/sources.list
194 194 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
195 195 EOF
196 196
197 197 fi
198 198
199 199 sudo apt-get update
200 200
201 201 PACKAGES="\
202 202 btrfs-progs \
203 203 build-essential \
204 204 bzr \
205 205 clang-format-6.0 \
206 206 cvs \
207 207 darcs \
208 208 debhelper \
209 209 devscripts \
210 210 dpkg-dev \
211 211 dstat \
212 212 emacs \
213 213 gettext \
214 214 git \
215 215 htop \
216 216 iotop \
217 217 jfsutils \
218 218 libbz2-dev \
219 219 libexpat1-dev \
220 220 libffi-dev \
221 221 libgdbm-dev \
222 222 liblzma-dev \
223 223 libncurses5-dev \
224 224 libnss3-dev \
225 225 libreadline-dev \
226 226 libsqlite3-dev \
227 227 libssl-dev \
228 228 netbase \
229 229 ntfs-3g \
230 230 nvme-cli \
231 231 pyflakes \
232 232 pyflakes3 \
233 233 pylint \
234 234 pylint3 \
235 235 python-all-dev \
236 236 python-dev \
237 237 python-docutils \
238 238 python-fuzzywuzzy \
239 239 python-pygments \
240 240 python-subversion \
241 241 python-vcr \
242 242 python3-dev \
243 243 python3-docutils \
244 244 python3-fuzzywuzzy \
245 245 python3-pygments \
246 246 python3-vcr \
247 247 rsync \
248 248 sqlite3 \
249 249 subversion \
250 250 tcl-dev \
251 251 tk-dev \
252 252 tla \
253 253 unzip \
254 254 uuid-dev \
255 255 vim \
256 256 virtualenv \
257 257 wget \
258 258 xfsprogs \
259 259 zip \
260 260 zlib1g-dev"
261 261
262 262 if [ "$DEBIAN_VERSION" = "9.8" ]; then
263 263 PACKAGES="$PACKAGES linux-perf"
264 264 elif [ "$DISTRO" = "Ubuntu" ]; then
265 265 PACKAGES="$PACKAGES linux-tools-common"
266 266 fi
267 267
268 268 # Ubuntu 19.04 removes monotone.
269 269 if [ "$LSB_RELEASE" != "disco" ]; then
270 270 PACKAGES="$PACKAGES monotone"
271 271 fi
272 272
273 273 # As of April 27, 2019, Docker hasn't published packages for
274 274 # Ubuntu 19.04 yet.
275 275 if [ "$LSB_RELEASE" != "disco" ]; then
276 276 PACKAGES="$PACKAGES docker-ce"
277 277 fi
278 278
279 279 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
280 280
281 281 # Create clang-format symlink so test harness finds it.
282 282 sudo update-alternatives --install /usr/bin/clang-format clang-format \
283 283 /usr/bin/clang-format-6.0 1000
284 284
285 285 sudo mkdir /hgdev
286 286 # Will be normalized to hg:hg later.
287 287 sudo chown `whoami` /hgdev
288 288
289 289 cp requirements-py2.txt /hgdev/requirements-py2.txt
290 290 cp requirements-py3.txt /hgdev/requirements-py3.txt
291 291
292 292 # Disable the pip version check because it uses the network and can
293 293 # be annoying.
294 294 cat << EOF | sudo tee -a /etc/pip.conf
295 295 [global]
296 296 disable-pip-version-check = True
297 297 EOF
298 298
299 299 {install_pythons}
300 300 {bootstrap_virtualenv}
301 301
302 302 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
303 303
304 304 # Mark the repo as non-publishing.
305 305 cat >> /hgdev/src/.hg/hgrc << EOF
306 306 [phases]
307 307 publish = false
308 308 EOF
309 309
310 310 sudo chown -R hg:hg /hgdev
311 311 '''.lstrip().format(
312 312 install_pythons=INSTALL_PYTHONS,
313 313 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
314 314 ).replace('\r\n', '\n')
315 315
316 316
317 317 # Prepares /hgdev for operations.
318 318 PREPARE_HGDEV = '''
319 319 #!/bin/bash
320 320
321 321 set -e
322 322
323 323 FS=$1
324 324
325 325 ensure_device() {
326 326 if [ -z "${DEVICE}" ]; then
327 327 echo "could not find block device to format"
328 328 exit 1
329 329 fi
330 330 }
331 331
332 332 # Determine device to partition for extra filesystem.
333 333 # If only 1 volume is present, it will be the root volume and
334 334 # should be /dev/nvme0. If multiple volumes are present, the
335 335 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
336 336 # a partition.
337 337 if [ -e /dev/nvme1n1 ]; then
338 338 if [ -e /dev/nvme0n1p1 ]; then
339 339 DEVICE=/dev/nvme1n1
340 340 else
341 341 DEVICE=/dev/nvme0n1
342 342 fi
343 343 else
344 344 DEVICE=
345 345 fi
346 346
347 347 sudo mkdir /hgwork
348 348
349 349 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
350 350 ensure_device
351 351 echo "creating ${FS} filesystem on ${DEVICE}"
352 352 fi
353 353
354 354 if [ "${FS}" = "default" ]; then
355 355 :
356 356
357 357 elif [ "${FS}" = "btrfs" ]; then
358 358 sudo mkfs.btrfs ${DEVICE}
359 359 sudo mount ${DEVICE} /hgwork
360 360
361 361 elif [ "${FS}" = "ext3" ]; then
362 362 # lazy_journal_init speeds up filesystem creation at the expense of
363 363 # integrity if things crash. We are an ephemeral instance, so we don't
364 364 # care about integrity.
365 365 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
366 366 sudo mount ${DEVICE} /hgwork
367 367
368 368 elif [ "${FS}" = "ext4" ]; then
369 369 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
370 370 sudo mount ${DEVICE} /hgwork
371 371
372 372 elif [ "${FS}" = "jfs" ]; then
373 373 sudo mkfs.jfs ${DEVICE}
374 374 sudo mount ${DEVICE} /hgwork
375 375
376 376 elif [ "${FS}" = "tmpfs" ]; then
377 377 echo "creating tmpfs volume in /hgwork"
378 378 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
379 379
380 380 elif [ "${FS}" = "xfs" ]; then
381 381 sudo mkfs.xfs ${DEVICE}
382 382 sudo mount ${DEVICE} /hgwork
383 383
384 384 else
385 385 echo "unsupported filesystem: ${FS}"
386 386 exit 1
387 387 fi
388 388
389 389 echo "/hgwork ready"
390 390
391 391 sudo chown hg:hg /hgwork
392 392 mkdir /hgwork/tmp
393 393 chown hg:hg /hgwork/tmp
394 394
395 395 rsync -a /hgdev/src /hgwork/
396 396 '''.lstrip().replace('\r\n', '\n')
397 397
398 398
399 399 HG_UPDATE_CLEAN = '''
400 400 set -ex
401 401
402 402 HG=/hgdev/venv-bootstrap/bin/hg
403 403
404 404 cd /hgwork/src
405 405 ${HG} --config extensions.purge= purge --all
406 406 ${HG} update -C $1
407 407 ${HG} log -r .
408 408 '''.lstrip().replace('\r\n', '\n')
409 409
410 410
411 411 def prepare_exec_environment(ssh_client, filesystem='default'):
412 412 """Prepare an EC2 instance to execute things.
413 413
414 414 The AMI has an ``/hgdev`` bootstrapped with various Python installs
415 415 and a clone of the Mercurial repo.
416 416
417 417 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
418 418 Notably, blocks have to be copied on first access, which makes volume
419 419 I/O extremely slow on fresh volumes.
420 420
421 421 Furthermore, we may want to run operations, tests, etc on alternative
422 422 filesystems so we examine behavior on different filesystems.
423 423
424 424 This function is used to facilitate executing operations on alternate
425 425 volumes.
426 426 """
427 427 sftp = ssh_client.open_sftp()
428 428
429 429 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
430 430 fh.write(PREPARE_HGDEV)
431 431 fh.chmod(0o0777)
432 432
433 433 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
434 434 chan, stdin, stdout = exec_command(ssh_client, command)
435 435 stdin.close()
436 436
437 437 for line in stdout:
438 438 print(line, end='')
439 439
440 440 res = chan.recv_exit_status()
441 441
442 442 if res:
443 443 raise Exception('non-0 exit code updating working directory; %d'
444 444 % res)
445 445
446 446
447 447 def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
448 448 """Synchronize a local Mercurial source path to remote EC2 instance."""
449 449
450 450 with tempfile.TemporaryDirectory() as temp_dir:
451 451 temp_dir = pathlib.Path(temp_dir)
452 452
453 453 ssh_dir = temp_dir / '.ssh'
454 454 ssh_dir.mkdir()
455 455 ssh_dir.chmod(0o0700)
456 456
457 457 public_ip = ec2_instance.public_ip_address
458 458
459 459 ssh_config = ssh_dir / 'config'
460 460
461 461 with ssh_config.open('w', encoding='utf-8') as fh:
462 462 fh.write('Host %s\n' % public_ip)
463 463 fh.write(' User hg\n')
464 464 fh.write(' StrictHostKeyChecking no\n')
465 465 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
466 466 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
467 467
468 468 if not (source_path / '.hg').is_dir():
469 469 raise Exception('%s is not a Mercurial repository; synchronization '
470 470 'not yet supported' % source_path)
471 471
472 472 env = dict(os.environ)
473 473 env['HGPLAIN'] = '1'
474 474 env['HGENCODING'] = 'utf-8'
475 475
476 476 hg_bin = source_path / 'hg'
477 477
478 478 res = subprocess.run(
479 479 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
480 480 cwd=str(source_path), env=env, check=True, capture_output=True)
481 481
482 482 full_revision = res.stdout.decode('ascii')
483 483
484 484 args = [
485 485 'python2.7', str(hg_bin),
486 486 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
487 487 '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
488 488 # Also ensure .hgtags changes are present so auto version
489 489 # calculation works.
490 490 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
491 491 'ssh://%s//hgwork/src' % public_ip,
492 492 ]
493 493
494 494 res = subprocess.run(args, cwd=str(source_path), env=env)
495 495
496 496 # Allow 1 (no-op) to not trigger error.
497 497 if res.returncode not in (0, 1):
498 498 res.check_returncode()
499 499
500 500 # TODO support synchronizing dirty working directory.
501 501
502 502 sftp = ec2_instance.ssh_client.open_sftp()
503 503
504 504 with sftp.open('/hgdev/hgup', 'wb') as fh:
505 505 fh.write(HG_UPDATE_CLEAN)
506 506 fh.chmod(0o0700)
507 507
508 508 chan, stdin, stdout = exec_command(
509 509 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
510 510 stdin.close()
511 511
512 512 for line in stdout:
513 513 print(line, end='')
514 514
515 515 res = chan.recv_exit_status()
516 516
517 517 if res:
518 518 raise Exception('non-0 exit code updating working directory; %d'
519 519 % res)
520 520
521 521
522 522 def run_tests(ssh_client, python_version, test_flags=None):
523 523 """Run tests on a remote Linux machine via an SSH client."""
524 524 test_flags = test_flags or []
525 525
526 526 print('running tests')
527 527
528 528 if python_version == 'system2':
529 529 python = '/usr/bin/python2'
530 530 elif python_version == 'system3':
531 531 python = '/usr/bin/python3'
532 532 elif python_version.startswith('pypy'):
533 533 python = '/hgdev/pyenv/shims/%s' % python_version
534 534 else:
535 535 python = '/hgdev/pyenv/shims/python%s' % python_version
536 536
537 537 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
538 538
539 539 command = (
540 540 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
541 541 'cd /hgwork/src/tests && %s run-tests.py %s"' % (
542 542 python, test_flags))
543 543
544 544 chan, stdin, stdout = exec_command(ssh_client, command)
545 545
546 546 stdin.close()
547 547
548 548 for line in stdout:
549 549 print(line, end='')
550 550
551 551 return chan.recv_exit_status()
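
Taken together, prepare_exec_environment(), synchronize_hg(), and run_tests() form a natural pipeline: format the /hgwork scratch filesystem, push the revision under test to the instance, then invoke the test harness. The sketch below is illustrative only and not part of this changeset; the hgautomation package name and the ec2_instance wrapper (exposing ssh_client, public_ip_address, and ssh_private_key_path) are assumptions taken from how the functions above use them.

import pathlib

from hgautomation import linux  # assumed package name for the module shown above


def test_revision_on_linux(ec2_instance, source_path: pathlib.Path,
                           revision: str, python_version: str = 'system3',
                           test_flags=None, filesystem: str = 'ext4'):
    """Drive the helpers in linux.py end to end (illustrative sketch)."""
    # Create and mount /hgwork on the requested filesystem, then seed it
    # with a copy of /hgdev/src.
    linux.prepare_exec_environment(ec2_instance.ssh_client,
                                   filesystem=filesystem)

    # Push the local revision (plus .hgtags history) to the instance and
    # update /hgwork/src to it.
    linux.synchronize_hg(source_path, ec2_instance, revision)

    # Run run-tests.py under the chosen interpreter and return its exit code.
    return linux.run_tests(ec2_instance.ssh_client, python_version,
                           test_flags=test_flags)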