automation: always install docker-ce...
Gregory Szorc
r43282:136c2536 default
@@ -1,566 +1,561 @@
1 1 # linux.py - Linux specific automation functionality
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import os
11 11 import pathlib
12 12 import shlex
13 13 import subprocess
14 14 import tempfile
15 15
16 16 from .ssh import (
17 17 exec_command,
18 18 )
19 19
20 20
21 21 # Linux distributions that are supported.
22 22 DISTROS = {
23 23 'debian9',
24 24 'ubuntu18.04',
25 25 'ubuntu18.10',
26 26 'ubuntu19.04',
27 27 }
28 28
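# A minimal sketch, assuming callers validate a requested distro against
# DISTROS before provisioning; the helper name is hypothetical and is not
# used elsewhere in this module.
def ensure_supported_distro(distro: str):
    if distro not in DISTROS:
        raise ValueError('unsupported Linux distribution: %s' % distro)
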
29 29 INSTALL_PYTHONS = r'''
30 30 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
31 31 PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
32 32
33 33 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
34 34 pushd /hgdev/pyenv
35 35 git checkout 17f44b7cd6f58ea2fa68ec0371fb9e7a826b8be2
36 36 popd
37 37
38 38 export PYENV_ROOT="/hgdev/pyenv"
39 39 export PATH="$PYENV_ROOT/bin:$PATH"
40 40
41 41 # pip 19.0.3.
42 42 PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
43 43 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
44 44 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
45 45
46 46 VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
47 47 VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
48 48 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
49 49 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
50 50
51 51 for v in ${PYENV2_VERSIONS}; do
52 52 pyenv install -v ${v}
53 53 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
54 54 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
55 55 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
56 56 done
57 57
58 58 for v in ${PYENV3_VERSIONS}; do
59 59 pyenv install -v ${v}
60 60 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
61 61 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
62 62 done
63 63
64 64 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
65 65 '''.lstrip().replace('\r\n', '\n')
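# The fragment above pins get-pip.py and the virtualenv tarball to known
# SHA-256 digests before using them (wget + `sha256sum --check`). A minimal
# Python sketch of the same download-and-verify pattern, with a hypothetical
# helper name, would be:
import hashlib
import urllib.request

def download_and_verify(url, dest, expected_sha256):
    """Fetch ``url`` to ``dest`` and fail loudly on a digest mismatch."""
    urllib.request.urlretrieve(url, dest)

    h = hashlib.sha256()
    with open(dest, 'rb') as fh:
        for chunk in iter(lambda: fh.read(65536), b''):
            h.update(chunk)

    if h.hexdigest() != expected_sha256:
        raise Exception('SHA-256 mismatch on %s' % dest)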
66 66
67 67
68 68 INSTALL_RUST = r'''
69 69 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
70 70 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
71 71 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
72 72
73 73 chmod +x rustup-init
74 74 sudo -H -u hg -g hg ./rustup-init -y
75 75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
76 76 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
77 77 '''
78 78
79 79
80 80 BOOTSTRAP_VIRTUALENV = r'''
81 81 /usr/bin/virtualenv /hgdev/venv-bootstrap
82 82
83 83 HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
84 84 HG_TARBALL=mercurial-4.9.1.tar.gz
85 85
86 86 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
87 87 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
88 88
89 89 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
90 90 '''.lstrip().replace('\r\n', '\n')
91 91
92 92
93 93 BOOTSTRAP_DEBIAN = r'''
94 94 #!/bin/bash
95 95
96 96 set -ex
97 97
98 98 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
99 99 DEBIAN_VERSION=`cat /etc/debian_version`
100 100 LSB_RELEASE=`lsb_release -cs`
101 101
102 102 sudo /usr/sbin/groupadd hg
103 103 sudo /usr/sbin/groupadd docker
104 104 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
105 105 sudo mkdir /home/hg/.ssh
106 106 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
107 107 sudo chown -R hg:hg /home/hg/.ssh
108 108 sudo chmod 700 /home/hg/.ssh
109 109 sudo chmod 600 /home/hg/.ssh/authorized_keys
110 110
111 111 cat << EOF | sudo tee /etc/sudoers.d/90-hg
112 112 hg ALL=(ALL) NOPASSWD:ALL
113 113 EOF
114 114
115 115 sudo apt-get update
116 116 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
117 117
118 118 # Install packages necessary to set up Docker Apt repo.
119 119 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
120 120 apt-transport-https \
121 121 gnupg
122 122
123 123 cat > docker-apt-key << EOF
124 124 -----BEGIN PGP PUBLIC KEY BLOCK-----
125 125
126 126 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
127 127 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
128 128 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
129 129 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
130 130 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
131 131 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
132 132 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
133 133 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
134 134 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
135 135 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
136 136 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
137 137 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
138 138 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
139 139 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
140 140 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
141 141 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
142 142 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
143 143 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
144 144 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
145 145 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
146 146 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
147 147 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
148 148 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
149 149 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
150 150 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
151 151 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
152 152 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
153 153 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
154 154 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
155 155 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
156 156 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
157 157 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
158 158 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
159 159 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
160 160 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
161 161 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
162 162 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
163 163 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
164 164 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
165 165 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
166 166 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
167 167 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
168 168 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
169 169 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
170 170 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
171 171 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
172 172 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
173 173 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
174 174 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
175 175 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
176 176 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
177 177 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
178 178 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
179 179 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
180 180 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
181 181 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
182 182 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
183 183 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
184 184 =0YYh
185 185 -----END PGP PUBLIC KEY BLOCK-----
186 186 EOF
187 187
188 188 sudo apt-key add docker-apt-key
189 189
190 190 if [ "$LSB_RELEASE" = "stretch" ]; then
191 191 cat << EOF | sudo tee -a /etc/apt/sources.list
192 192 # Need backports for clang-format-6.0
193 193 deb http://deb.debian.org/debian stretch-backports main
194 194
195 195 # Sources are useful if we want to compile things locally.
196 196 deb-src http://deb.debian.org/debian stretch main
197 197 deb-src http://security.debian.org/debian-security stretch/updates main
198 198 deb-src http://deb.debian.org/debian stretch-updates main
199 199 deb-src http://deb.debian.org/debian stretch-backports main
200 200
201 201 deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
202 202 EOF
203 203
204 204 elif [ "$DISTRO" = "Ubuntu" ]; then
205 205 cat << EOF | sudo tee -a /etc/apt/sources.list
206 206 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
207 207 EOF
208 208
209 209 fi
210 210
211 211 sudo apt-get update
212 212
213 213 PACKAGES="\
214 214 btrfs-progs \
215 215 build-essential \
216 216 bzr \
217 217 clang-format-6.0 \
218 218 cvs \
219 219 darcs \
220 220 debhelper \
221 221 devscripts \
222 docker-ce \
222 223 dpkg-dev \
223 224 dstat \
224 225 emacs \
225 226 gettext \
226 227 git \
227 228 htop \
228 229 iotop \
229 230 jfsutils \
230 231 libbz2-dev \
231 232 libexpat1-dev \
232 233 libffi-dev \
233 234 libgdbm-dev \
234 235 liblzma-dev \
235 236 libncurses5-dev \
236 237 libnss3-dev \
237 238 libreadline-dev \
238 239 libsqlite3-dev \
239 240 libssl-dev \
240 241 netbase \
241 242 ntfs-3g \
242 243 nvme-cli \
243 244 pyflakes \
244 245 pyflakes3 \
245 246 pylint \
246 247 pylint3 \
247 248 python-all-dev \
248 249 python-dev \
249 250 python-docutils \
250 251 python-fuzzywuzzy \
251 252 python-pygments \
252 253 python-subversion \
253 254 python-vcr \
254 255 python3-dev \
255 256 python3-docutils \
256 257 python3-fuzzywuzzy \
257 258 python3-pygments \
258 259 python3-vcr \
259 260 rsync \
260 261 sqlite3 \
261 262 subversion \
262 263 tcl-dev \
263 264 tk-dev \
264 265 tla \
265 266 unzip \
266 267 uuid-dev \
267 268 vim \
268 269 virtualenv \
269 270 wget \
270 271 xfsprogs \
271 272 zip \
272 273 zlib1g-dev"
273 274
274 275 if [ "$LSB_RELEASE" = "stretch" ]; then
275 276 PACKAGES="$PACKAGES linux-perf"
276 277 elif [ "$DISTRO" = "Ubuntu" ]; then
277 278 PACKAGES="$PACKAGES linux-tools-common"
278 279 fi
279 280
280 281 # Ubuntu 19.04 removes monotone.
281 282 if [ "$LSB_RELEASE" != "disco" ]; then
282 283 PACKAGES="$PACKAGES monotone"
283 284 fi
284 285
285 # As of April 27, 2019, Docker hasn't published packages for
286 # Ubuntu 19.04 yet.
287 if [ "$LSB_RELEASE" != "disco" ]; then
288 PACKAGES="$PACKAGES docker-ce"
289 fi
290
291 286 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
292 287
293 288 # Create clang-format symlink so test harness finds it.
294 289 sudo update-alternatives --install /usr/bin/clang-format clang-format \
295 290 /usr/bin/clang-format-6.0 1000
296 291
297 292 sudo mkdir /hgdev
298 293 # Will be normalized to hg:hg later.
299 294 sudo chown `whoami` /hgdev
300 295
301 296 {install_rust}
302 297
303 298 cp requirements-py2.txt /hgdev/requirements-py2.txt
304 299 cp requirements-py3.txt /hgdev/requirements-py3.txt
305 300
306 301 # Disable the pip version check because it uses the network and can
307 302 # be annoying.
308 303 cat << EOF | sudo tee -a /etc/pip.conf
309 304 [global]
310 305 disable-pip-version-check = True
311 306 EOF
312 307
313 308 {install_pythons}
314 309 {bootstrap_virtualenv}
315 310
316 311 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
317 312
318 313 # Mark the repo as non-publishing.
319 314 cat >> /hgdev/src/.hg/hgrc << EOF
320 315 [phases]
321 316 publish = false
322 317 EOF
323 318
324 319 sudo chown -R hg:hg /hgdev
325 320 '''.lstrip().format(
326 321 install_rust=INSTALL_RUST,
327 322 install_pythons=INSTALL_PYTHONS,
328 323 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
329 324 ).replace('\r\n', '\n')
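# BOOTSTRAP_DEBIAN is a complete shell script once the Rust, Python, and
# virtualenv fragments above are substituted in. A minimal sketch of how a
# caller could push it to a fresh instance and run it over SSH, assuming a
# connected paramiko SSHClient and reusing the exec_command() helper imported
# above (the remote path and function name are illustrative, not what the
# automation actually uses):
def run_bootstrap(ssh_client):
    sftp = ssh_client.open_sftp()

    with sftp.open('/home/admin/bootstrap.sh', 'wb') as fh:
        fh.write(BOOTSTRAP_DEBIAN)
        fh.chmod(0o0700)

    chan, stdin, stdout = exec_command(ssh_client, '/home/admin/bootstrap.sh')
    stdin.close()

    for line in stdout:
        print(line, end='')

    if chan.recv_exit_status():
        raise Exception('non-0 exit code running bootstrap script')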
330 325
331 326
332 327 # Prepares /hgdev for operations.
333 328 PREPARE_HGDEV = '''
334 329 #!/bin/bash
335 330
336 331 set -e
337 332
338 333 FS=$1
339 334
340 335 ensure_device() {
341 336 if [ -z "${DEVICE}" ]; then
342 337 echo "could not find block device to format"
343 338 exit 1
344 339 fi
345 340 }
346 341
347 342 # Determine device to partition for extra filesystem.
348 343 # If only 1 volume is present, it will be the root volume and
349 344 # should be /dev/nvme0. If multiple volumes are present, the
350 345 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
351 346 # a partition.
352 347 if [ -e /dev/nvme1n1 ]; then
353 348 if [ -e /dev/nvme0n1p1 ]; then
354 349 DEVICE=/dev/nvme1n1
355 350 else
356 351 DEVICE=/dev/nvme0n1
357 352 fi
358 353 else
359 354 DEVICE=
360 355 fi
361 356
362 357 sudo mkdir /hgwork
363 358
364 359 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
365 360 ensure_device
366 361 echo "creating ${FS} filesystem on ${DEVICE}"
367 362 fi
368 363
369 364 if [ "${FS}" = "default" ]; then
370 365 :
371 366
372 367 elif [ "${FS}" = "btrfs" ]; then
373 368 sudo mkfs.btrfs ${DEVICE}
374 369 sudo mount ${DEVICE} /hgwork
375 370
376 371 elif [ "${FS}" = "ext3" ]; then
377 372 # lazy_journal_init speeds up filesystem creation at the expense of
378 373 # integrity if things crash. We are an ephemeral instance, so we don't
379 374 # care about integrity.
380 375 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
381 376 sudo mount ${DEVICE} /hgwork
382 377
383 378 elif [ "${FS}" = "ext4" ]; then
384 379 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
385 380 sudo mount ${DEVICE} /hgwork
386 381
387 382 elif [ "${FS}" = "jfs" ]; then
388 383 sudo mkfs.jfs ${DEVICE}
389 384 sudo mount ${DEVICE} /hgwork
390 385
391 386 elif [ "${FS}" = "tmpfs" ]; then
392 387 echo "creating tmpfs volume in /hgwork"
393 388 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
394 389
395 390 elif [ "${FS}" = "xfs" ]; then
396 391 sudo mkfs.xfs ${DEVICE}
397 392 sudo mount ${DEVICE} /hgwork
398 393
399 394 else
400 395 echo "unsupported filesystem: ${FS}"
401 396 exit 1
402 397 fi
403 398
404 399 echo "/hgwork ready"
405 400
406 401 sudo chown hg:hg /hgwork
407 402 mkdir /hgwork/tmp
408 403 chown hg:hg /hgwork/tmp
409 404
410 405 rsync -a /hgdev/src /hgwork/
411 406 '''.lstrip().replace('\r\n', '\n')
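# The shell above picks whichever NVMe device does not carry a partition,
# since the root volume is the one that does. The same decision expressed in
# Python, purely as an illustration of the logic (the instance runs the shell
# version, not this helper):
import os

def find_extra_device():
    """Return the block device to format for /hgwork, or None."""
    if not os.path.exists('/dev/nvme1n1'):
        # Only one volume present: it is the root volume; nothing to format.
        return None
    if os.path.exists('/dev/nvme0n1p1'):
        # nvme0 has a partition, so it is the root volume.
        return '/dev/nvme1n1'
    return '/dev/nvme0n1'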
412 407
413 408
414 409 HG_UPDATE_CLEAN = '''
415 410 set -ex
416 411
417 412 HG=/hgdev/venv-bootstrap/bin/hg
418 413
419 414 cd /hgwork/src
420 415 ${HG} --config extensions.purge= purge --all
421 416 ${HG} update -C $1
422 417 ${HG} log -r .
423 418 '''.lstrip().replace('\r\n', '\n')
424 419
425 420
426 421 def prepare_exec_environment(ssh_client, filesystem='default'):
427 422 """Prepare an EC2 instance to execute things.
428 423
429 424 The AMI has an ``/hgdev`` bootstrapped with various Python installs
430 425 and a clone of the Mercurial repo.
431 426
432 427 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
433 428 Notably, blocks have to be copied on first access, which makes volume
434 429 I/O extremely slow on fresh volumes.
435 430
436 431 Furthermore, we may want to run operations, tests, etc. on alternative
437 432 filesystems so we can examine behavior on different filesystems.
438 433
439 434 This function is used to facilitate executing operations on alternate
440 435 volumes.
441 436 """
442 437 sftp = ssh_client.open_sftp()
443 438
444 439 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
445 440 fh.write(PREPARE_HGDEV)
446 441 fh.chmod(0o0777)
447 442
448 443 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
449 444 chan, stdin, stdout = exec_command(ssh_client, command)
450 445 stdin.close()
451 446
452 447 for line in stdout:
453 448 print(line, end='')
454 449
455 450 res = chan.recv_exit_status()
456 451
457 452 if res:
458 453 raise Exception('non-0 exit code preparing exec environment; %d'
459 454 % res)
460 455
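# A usage sketch for the function above, assuming a connected paramiko
# SSHClient; the wrapper name, host, and key path are placeholders rather
# than values this module defines.
def _example_prepare(host, key_path):
    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(host, username='hg', key_filename=key_path)

    prepare_exec_environment(client, filesystem='tmpfs')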
461 456
462 457 def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
463 458 """Synchronize a local Mercurial source path to remote EC2 instance."""
464 459
465 460 with tempfile.TemporaryDirectory() as temp_dir:
466 461 temp_dir = pathlib.Path(temp_dir)
467 462
468 463 ssh_dir = temp_dir / '.ssh'
469 464 ssh_dir.mkdir()
470 465 ssh_dir.chmod(0o0700)
471 466
472 467 public_ip = ec2_instance.public_ip_address
473 468
474 469 ssh_config = ssh_dir / 'config'
475 470
476 471 with ssh_config.open('w', encoding='utf-8') as fh:
477 472 fh.write('Host %s\n' % public_ip)
478 473 fh.write(' User hg\n')
479 474 fh.write(' StrictHostKeyChecking no\n')
480 475 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
481 476 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
482 477
483 478 if not (source_path / '.hg').is_dir():
484 479 raise Exception('%s is not a Mercurial repository; synchronization '
485 480 'not yet supported' % source_path)
486 481
487 482 env = dict(os.environ)
488 483 env['HGPLAIN'] = '1'
489 484 env['HGENCODING'] = 'utf-8'
490 485
491 486 hg_bin = source_path / 'hg'
492 487
493 488 res = subprocess.run(
494 489 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
495 490 cwd=str(source_path), env=env, check=True, capture_output=True)
496 491
497 492 full_revision = res.stdout.decode('ascii')
498 493
499 494 args = [
500 495 'python2.7', str(hg_bin),
501 496 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
502 497 '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
503 498 # Also ensure .hgtags changes are present so auto version
504 499 # calculation works.
505 500 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
506 501 'ssh://%s//hgwork/src' % public_ip,
507 502 ]
508 503
509 504 res = subprocess.run(args, cwd=str(source_path), env=env)
510 505
511 506 # Allow 1 (no-op) to not trigger error.
512 507 if res.returncode not in (0, 1):
513 508 res.check_returncode()
514 509
515 510 # TODO support synchronizing dirty working directory.
516 511
517 512 sftp = ec2_instance.ssh_client.open_sftp()
518 513
519 514 with sftp.open('/hgdev/hgup', 'wb') as fh:
520 515 fh.write(HG_UPDATE_CLEAN)
521 516 fh.chmod(0o0700)
522 517
523 518 chan, stdin, stdout = exec_command(
524 519 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
525 520 stdin.close()
526 521
527 522 for line in stdout:
528 523 print(line, end='')
529 524
530 525 res = chan.recv_exit_status()
531 526
532 527 if res:
533 528 raise Exception('non-0 exit code updating working directory; %d'
534 529 % res)
535 530
536 531
537 532 def run_tests(ssh_client, python_version, test_flags=None):
538 533 """Run tests on a remote Linux machine via an SSH client."""
539 534 test_flags = test_flags or []
540 535
541 536 print('running tests')
542 537
543 538 if python_version == 'system2':
544 539 python = '/usr/bin/python2'
545 540 elif python_version == 'system3':
546 541 python = '/usr/bin/python3'
547 542 elif python_version.startswith('pypy'):
548 543 python = '/hgdev/pyenv/shims/%s' % python_version
549 544 else:
550 545 python = '/hgdev/pyenv/shims/python%s' % python_version
551 546
552 547 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
553 548
554 549 command = (
555 550 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
556 551 'cd /hgwork/src/tests && %s run-tests.py %s"' % (
557 552 python, test_flags))
558 553
559 554 chan, stdin, stdout = exec_command(ssh_client, command)
560 555
561 556 stdin.close()
562 557
563 558 for line in stdout:
564 559 print(line, end='')
565 560
566 561 return chan.recv_exit_status()
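# Putting the pieces together: a minimal end-to-end sketch, assuming an
# ec2_instance object that exposes ssh_client, public_ip_address, and
# ssh_private_key_path (as the functions above expect); the wrapper name,
# source path, and revision are illustrative only.
def run_linux_tests(ec2_instance, source_path, python_version='3.7'):
    # Format (or tmpfs-mount) /hgwork and seed it from /hgdev/src.
    prepare_exec_environment(ec2_instance.ssh_client, filesystem='default')

    # Push the requested revision and update /hgwork/src to it.
    synchronize_hg(pathlib.Path(source_path), ec2_instance, revision='.')

    # Run the test harness with the chosen Python; returns its exit code.
    return run_tests(ec2_instance.ssh_client, python_version)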