##// END OF EJS Templates
merge default into stable for 5.2 release
Augie Fackler -
r43600:59338f95 merge 5.2rc0 stable
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,17 b''
1 [tool.black]
2 line-length = 80
3 exclude = '''
4 build/
5 | wheelhouse/
6 | dist/
7 | packages/
8 | \.hg/
9 | \.mypy_cache/
10 | \.venv/
11 | mercurial/thirdparty/
12 | hgext/fsmonitor/pywatchman/
13 | contrib/python-zstandard/
14 | contrib/grey.py
15 '''
16 skip-string-normalization = true
17 quiet = true
@@ -0,0 +1,21 b''
1 # pypi.py - Automation around PyPI
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 from twine.commands.upload import upload as twine_upload
11 from twine.settings import Settings
12
13
def upload(paths):
    """Publish the given distribution files on PyPI via twine.

    ``paths`` is an iterable of ``pathlib.Path`` instances pointing at the
    artifacts to upload.
    """
    # Default Settings() picks up credentials the same way the twine CLI
    # does (environment / ~/.pypirc).
    dists = []
    for path in paths:
        dists.append(str(path))

    twine_upload(Settings(), dists)
@@ -0,0 +1,99 b''
1 # try_server.py - Interact with Try server
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 import base64
11 import json
12 import os
13 import subprocess
14 import tempfile
15
16 from .aws import AWSConnection
17
18 LAMBDA_FUNCTION = "ci-try-server-upload"
19
20
def trigger_try(c: AWSConnection, rev="."):
    """Kick off a new Try run for ``rev`` via the upload Lambda function."""
    client = c.session.client("lambda")

    cset, bundle = generate_bundle(rev=rev)

    # The Lambda entry point takes JSON, so the binary bundle travels
    # base64-encoded alongside the changeset metadata.
    payload = {
        "bundle": base64.b64encode(bundle).decode("utf-8"),
        "node": cset["node"],
        "branch": cset["branch"],
        "user": cset["user"],
        "message": cset["desc"],
    }

    print("resolved revision:")
    print("node: %s" % cset["node"])
    print("branch: %s" % cset["branch"])
    print("user: %s" % cset["user"])
    print("desc: %s" % cset["desc"].splitlines()[0])
    print()

    print("sending to Try...")
    invocation = client.invoke(
        FunctionName=LAMBDA_FUNCTION,
        InvocationType="RequestResponse",
        Payload=json.dumps(payload).encode("utf-8"),
    )

    # The function returns a list of log lines from the remote side.
    for message in json.load(invocation["Payload"]):
        print("remote: %s" % message)
52
53
def generate_bundle(rev="."):
    """Generate a bundle suitable for use by the Try service.

    Returns a tuple of revision metadata and raw Mercurial bundle data.
    """
    # `hg bundle` doesn't support streaming to stdout, so bounce the data
    # through a temporary file that is always cleaned up afterwards.
    bundle_path = None
    try:
        fd, bundle_path = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
        os.close(fd)

        bundle_args = [
            "hg",
            "bundle",
            "--type",
            "gzip-v2",
            "--base",
            "public()",
            "--rev",
            rev,
            bundle_path,
        ]

        print("generating bundle...")
        subprocess.run(bundle_args, check=True)

        with open(bundle_path, "rb") as fh:
            bundle_data = fh.read()

    finally:
        if bundle_path:
            os.unlink(bundle_path)

    log_args = [
        "hg",
        "log",
        "-r",
        rev,
        # We have to upload as JSON, so it won't matter if we emit binary
        # since we need to normalize to UTF-8.
        "-T",
        "json",
    ]
    res = subprocess.run(log_args, check=True, capture_output=True)
    return json.loads(res.stdout)[0], bundle_data
@@ -0,0 +1,15 b''
1 [fix]
2 clang-format:command = clang-format --style file -i
3 clang-format:pattern = (**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"
4
5 rustfmt:command = rustfmt {rootpath}
6 rustfmt:pattern = set:**.rs
7
8 # We use black, but currently with
9 # https://github.com/psf/black/pull/826 applied. For now
10 # contrib/grey.py is our fork of black. You need to pip install
11 # git+https://github.com/python/black/@d9e71a75ccfefa3d9156a64c03313a0d4ad981e5
12 # to have the dependencies for grey.
13 #
14 # black:command = python3.7 contrib/grey.py --config=black.toml -
15 # black:pattern = set:**.py - hgext/fsmonitor/pywatchman/** - mercurial/thirdparty/** - "contrib/python-zstandard/**" - contrib/grey.py
@@ -0,0 +1,56 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include "pyutil.h"
7
8 #include <string>
9
10 extern "C" {
11
12 static PyCodeObject *code;
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
18 from parsers import dirs
19 try:
20 files = mdata.split('\n')
21 d = dirs(files)
22 list(d)
23 'a' in d
24 if files:
25 files[0] in d
26 except Exception as e:
27 pass
28 # uncomment this print if you're editing this Python code
29 # to debug failures.
30 # print e
31 )py",
32 "fuzzer", Py_file_input);
33 return 0;
34 }
35
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
	// Don't allow fuzzer inputs larger than 100k, since we'll just bog
	// down and not accomplish much.
	if (Size > 100000) {
		return 0;
	}
	// Expose the raw fuzzer input to the precompiled snippet as the
	// `mdata` local; the snippet splits it on '\n' and feeds the lines
	// to parsers.dirs.
	PyObject *mtext =
	    PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
	PyObject *locals = PyDict_New();
	PyDict_SetItemString(locals, "mdata", mtext);
	PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
	if (!res) {
		// The snippet swallows ordinary exceptions itself; anything
		// that still escapes is printed for triage.
		PyErr_Print();
	}
	// Drop our references; res may be NULL, hence XDECREF.
	Py_XDECREF(res);
	Py_DECREF(locals);
	Py_DECREF(mtext);
	return 0; // Non-zero return values are reserved for future use.
}
56 }
@@ -0,0 +1,78 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include "pyutil.h"
7
8 #include <iostream>
9 #include <string>
10
11 extern "C" {
12
13 static PyCodeObject *code;
14
// Compile the driver snippet once at startup; LLVMFuzzerTestOneInput
// re-runs it with a fresh `data` local for every fuzz input.
extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
{
	contrib::initpy(*argv[0]);
	code = (PyCodeObject *)Py_CompileString(R"py(
from parsers import (
    isasciistr,
    asciilower,
    asciiupper,
    encodedir,
    pathencode,
    lowerencode,
)

try:
    for fn in (
        isasciistr,
        asciilower,
        asciiupper,
        encodedir,
        pathencode,
        lowerencode,
    ):
        try:
            fn(data)
        except UnicodeDecodeError:
            pass  # some functions emit this exception
        except AttributeError:
            # pathencode needs hashlib, which fails to import because the time
            # module fails to import. We should try and fix that some day, but
            # for now we at least get coverage on non-hashencoded codepaths.
            if fn != pathencode:
                raise
        # uncomment this for debugging exceptions
        # except Exception as e:
        #     raise Exception('%r: %r' % (fn, e))
except Exception as e:
    pass
# uncomment this print if you're editing this Python code
# to debug failures.
# print(e)
)py",
	                                        "fuzzer", Py_file_input);
	// Surface snippet compile errors immediately rather than failing on
	// every subsequent input.
	if (!code) {
		std::cerr << "failed to compile Python code!" << std::endl;
	}
	return 0;
}
62
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
	// Whole fuzzer input becomes the `data` bytes local of the snippet.
	// NOTE(review): unlike the dirs fuzzer, there is no input-size cap
	// here — presumably intentional, but worth confirming.
	PyObject *mtext =
	    PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
	PyObject *locals = PyDict_New();
	PyDict_SetItemString(locals, "data", mtext);
	PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
	if (!res) {
		PyErr_Print();
	}
	// res may be NULL on failure, hence XDECREF for it alone.
	Py_XDECREF(res);
	Py_DECREF(locals);
	Py_DECREF(mtext);
	return 0; // Non-zero return values are reserved for future use.
}
78 }
@@ -0,0 +1,57 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include "pyutil.h"
7
8 #include <fuzzer/FuzzedDataProvider.h>
9 #include <iostream>
10 #include <string>
11
12 extern "C" {
13
14 static PyCodeObject *code;
15
// Compile the jsonescapeu8fast driver snippet once; it reads the
// `data` and `paranoid` locals installed per-input by the fuzz loop.
extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
{
	contrib::initpy(*argv[0]);
	code = (PyCodeObject *)Py_CompileString(R"py(
from parsers import jsonescapeu8fast

try:
    jsonescapeu8fast(data, paranoid)
except Exception as e:
    pass
# uncomment this print if you're editing this Python code
# to debug failures.
# print(e)
)py",
	                                        "fuzzer", Py_file_input);
	// Report compile failure up front rather than on every input.
	if (!code) {
		std::cerr << "failed to compile Python code!" << std::endl;
	}
	return 0;
}
36
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
	// First byte of the input selects paranoid mode; the remainder is
	// the string handed to jsonescapeu8fast.
	FuzzedDataProvider provider(Data, Size);
	bool paranoid = provider.ConsumeBool();
	std::string remainder = provider.ConsumeRemainingBytesAsString();

	PyObject *mtext = PyBytes_FromStringAndSize(
	    (const char *)remainder.c_str(), remainder.size());
	PyObject *locals = PyDict_New();
	PyDict_SetItemString(locals, "data", mtext);
	// Py_True/Py_False are immortal singletons; SetItemString takes its
	// own reference, so no INCREF is needed here.
	PyDict_SetItemString(locals, "paranoid", paranoid ? Py_True : Py_False);
	PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
	if (!res) {
		PyErr_Print();
	}
	Py_XDECREF(res);
	Py_DECREF(locals);
	Py_DECREF(mtext);
	return 0; // Non-zero return values are reserved for future use.
}
57 }
This diff has been collapsed as it changes many lines, (4094 lines changed) Show them Hide them
@@ -0,0 +1,4094 b''
1 # no-check-code because 3rd party
2 import ast
3 import asyncio
4 from concurrent.futures import Executor, ProcessPoolExecutor
5 from contextlib import contextmanager
6 from datetime import datetime
7 from enum import Enum
8 from functools import lru_cache, partial, wraps
9 import io
10 import itertools
11 import logging
12 from multiprocessing import Manager, freeze_support
13 import os
14 from pathlib import Path
15 import pickle
16 import re
17 import signal
18 import sys
19 import tempfile
20 import tokenize
21 import traceback
22 from typing import (
23 Any,
24 Callable,
25 Collection,
26 Dict,
27 Generator,
28 Generic,
29 Iterable,
30 Iterator,
31 List,
32 Optional,
33 Pattern,
34 Sequence,
35 Set,
36 Tuple,
37 TypeVar,
38 Union,
39 cast,
40 )
41
42 from appdirs import user_cache_dir
43 from attr import dataclass, evolve, Factory
44 import click
45 import toml
46 from typed_ast import ast3, ast27
47
48 # lib2to3 fork
49 from blib2to3.pytree import Node, Leaf, type_repr
50 from blib2to3 import pygram, pytree
51 from blib2to3.pgen2 import driver, token
52 from blib2to3.pgen2.grammar import Grammar
53 from blib2to3.pgen2.parse import ParseError
54
__version__ = '19.3b1.dev95+gdc1add6.d20191005'

DEFAULT_LINE_LENGTH = 88
# Directories skipped by default during recursive file discovery.
DEFAULT_EXCLUDES = (
    r"/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist)/"
)
# Only .py / .pyi files are picked up by default.
DEFAULT_INCLUDES = r"\.pyi?$"
CACHE_DIR = Path(user_cache_dir("black", version=__version__))


# types — readability aliases used in annotations throughout the module.
FileContent = str
Encoding = str
NewLine = str
Depth = int
NodeType = int
LeafID = int
Priority = int
Index = int
LN = Union[Leaf, Node]
SplitFunc = Callable[["Line", Collection["Feature"]], Iterator["Line"]]
Timestamp = float
FileSize = int
CacheInfo = Tuple[Timestamp, FileSize]
Cache = Dict[Path, CacheInfo]
# stderr echo helpers: `out` for normal messages (bold), `err` for errors (red).
out = partial(click.secho, bold=True, err=True)
err = partial(click.secho, fg="red", err=True)

# Prime blib2to3's pickled-grammar cache inside our cache directory.
pygram.initialize(CACHE_DIR)
syms = pygram.python_symbols
85
86
class NothingChanged(UserWarning):
    """Raised when reformatted code is the same as source."""

    # Control-flow signal, not an error: callers treat it as "no write needed".


class CannotSplit(Exception):
    """A readable split that fits the allotted line length is impossible."""


class InvalidInput(ValueError):
    """Raised when input source code fails all parse attempts."""
97
98
class WriteBack(Enum):
    """What to do with a file's reformatted contents."""

    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3

    @classmethod
    def from_configuration(cls, *, check: bool, diff: bool) -> "WriteBack":
        """Map the --check/--diff CLI flags onto a WriteBack mode.

        --check wins only when --diff is absent; --diff otherwise takes
        precedence over plain write-back.
        """
        if check and not diff:
            return cls.CHECK
        if diff:
            return cls.DIFF
        return cls.YES
111
112
class Changed(Enum):
    """Per-file outcome: NO = already formatted, CACHED = skipped via cache, YES = reformatted."""

    NO = 0
    CACHED = 1
    YES = 2
117
118
class TargetVersion(Enum):
    """Python versions the emitted code may be asked to stay compatible with."""

    # Values sort chronologically; get_cache_key sorts members by `.value`.
    PY27 = 2
    PY33 = 3
    PY34 = 4
    PY35 = 5
    PY36 = 6
    PY37 = 7
    PY38 = 8

    def is_python2(self) -> bool:
        """Return True for the sole Python 2 member (PY27)."""
        return self is TargetVersion.PY27


# Versions selected by the deprecated --py36 flag (3.6 and everything newer).
PY36_VERSIONS = {TargetVersion.PY36, TargetVersion.PY37, TargetVersion.PY38}
133
134
class Feature(Enum):
    """Syntax features whose availability depends on the target Python version."""

    # All string literals are unicode
    UNICODE_LITERALS = 1
    F_STRINGS = 2
    NUMERIC_UNDERSCORES = 3
    TRAILING_COMMA_IN_CALL = 4
    TRAILING_COMMA_IN_DEF = 5
    # The following two feature-flags are mutually exclusive, and exactly one should be
    # set for every version of python.
    ASYNC_IDENTIFIERS = 6
    ASYNC_KEYWORDS = 7
    ASSIGNMENT_EXPRESSIONS = 8
    POS_ONLY_ARGUMENTS = 9


# Feature set per target version; consulted via supports_feature().
VERSION_TO_FEATURES: Dict[TargetVersion, Set[Feature]] = {
    TargetVersion.PY27: {Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY33: {Feature.UNICODE_LITERALS, Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY34: {Feature.UNICODE_LITERALS, Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY35: {
        Feature.UNICODE_LITERALS,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.ASYNC_IDENTIFIERS,
    },
    TargetVersion.PY36: {
        Feature.UNICODE_LITERALS,
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_IDENTIFIERS,
    },
    TargetVersion.PY37: {
        Feature.UNICODE_LITERALS,
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
    },
    TargetVersion.PY38: {
        Feature.UNICODE_LITERALS,
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.POS_ONLY_ARGUMENTS,
    },
}
186
187
@dataclass
class FileMode:
    """Formatting options for a run: targets, line length, string/pyi handling."""

    # Empty set means "autodetect the target versions per file" (see main()).
    target_versions: Set[TargetVersion] = Factory(set)
    line_length: int = DEFAULT_LINE_LENGTH
    string_normalization: bool = True
    is_pyi: bool = False

    def get_cache_key(self) -> str:
        """Return a stable string encoding of this mode, used as a cache key."""
        if self.target_versions:
            # Sort by enum value so the key is independent of set ordering.
            version_str = ",".join(
                str(version.value)
                for version in sorted(self.target_versions, key=lambda v: v.value)
            )
        else:
            version_str = "-"
        parts = [
            version_str,
            str(self.line_length),
            str(int(self.string_normalization)),
            str(int(self.is_pyi)),
        ]
        return ".".join(parts)


def supports_feature(target_versions: Set[TargetVersion], feature: Feature) -> bool:
    """Return True if every requested target version supports `feature`.

    Vacuously True when `target_versions` is empty.
    """
    return all(feature in VERSION_TO_FEATURES[version] for version in target_versions)
214
215
def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Union[str, int, bool, None]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    assert not isinstance(value, (int, bool)), "Invalid parameter type passed"
    # No explicit --config: look for pyproject.toml at the project root.
    if not value:
        root = find_project_root(ctx.params.get("src", ()))
        path = root / "pyproject.toml"
        if path.is_file():
            value = str(path)
        else:
            return None

    try:
        pyproject_toml = toml.load(value)
        config = pyproject_toml.get("tool", {}).get("black", {})
    except (toml.TomlDecodeError, OSError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        )

    if not config:
        return None

    if ctx.default_map is None:
        ctx.default_map = {}
    # TOML keys use CLI spelling (dashes); click expects snake_case names.
    ctx.default_map.update(  # type: ignore # bad types in .pyi
        {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
    )
    return value
250
251
@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
@click.option(
    "-l",
    "--line-length",
    type=int,
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    show_default=True,
)
@click.option(
    "-t",
    "--target-version",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=lambda c, p, v: [TargetVersion[val.upper()] for val in v],
    multiple=True,
    help=(
        "Python versions that should be supported by Black's output. [default: "
        "per-file auto-detection]"
    ),
)
@click.option(
    "--py36",
    is_flag=True,
    help=(
        "Allow using Python 3.6-only syntax on all input files. This will put "
        "trailing commas in function signatures and calls also after *args and "
        "**kwargs. Deprecated; use --target-version instead. "
        "[default: per-file auto-detection]"
    ),
)
@click.option(
    "--pyi",
    is_flag=True,
    help=(
        "Format all input files like typing stubs regardless of file extension "
        "(useful when piping source on standard input)."
    ),
)
@click.option(
    "-S",
    "--skip-string-normalization",
    is_flag=True,
    help="Don't normalize string quotes or prefixes.",
)
@click.option(
    "--check",
    is_flag=True,
    help=(
        "Don't write the files back, just return the status. Return code 0 "
        "means nothing would change. Return code 1 means some files would be "
        "reformatted. Return code 123 means there was an internal error."
    ),
)
@click.option(
    "--diff",
    is_flag=True,
    help="Don't write the files back, just output a diff for each file on stdout.",
)
@click.option(
    "--fast/--safe",
    is_flag=True,
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
)
@click.option(
    "--include",
    type=str,
    default=DEFAULT_INCLUDES,
    help=(
        "A regular expression that matches files and directories that should be "
        "included on recursive searches. An empty value means all files are "
        "included regardless of the name. Use forward slashes for directories on "
        "all platforms (Windows, too). Exclusions are calculated first, inclusions "
        "later."
    ),
    show_default=True,
)
@click.option(
    "--exclude",
    type=str,
    default=DEFAULT_EXCLUDES,
    help=(
        "A regular expression that matches files and directories that should be "
        "excluded on recursive searches. An empty value means no paths are excluded. "
        "Use forward slashes for directories on all platforms (Windows, too). "
        "Exclusions are calculated first, inclusions later."
    ),
    show_default=True,
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help=(
        "Don't emit non-error messages to stderr. Errors are still emitted; "
        "silence those with 2>/dev/null."
    ),
)
@click.option(
    "-v",
    "--verbose",
    is_flag=True,
    help=(
        "Also emit messages to stderr about files that were not changed or were "
        "ignored due to --exclude=."
    ),
)
@click.version_option(version=__version__)
@click.argument(
    "src",
    nargs=-1,
    type=click.Path(
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    ),
    is_eager=True,
)
@click.option(
    "--config",
    type=click.Path(
        exists=False, file_okay=True, dir_okay=False, readable=True, allow_dash=False
    ),
    is_eager=True,
    callback=read_pyproject_toml,
    help="Read configuration from PATH.",
)
@click.pass_context
def main(
    ctx: click.Context,
    code: Optional[str],
    line_length: int,
    target_version: List[TargetVersion],
    check: bool,
    diff: bool,
    fast: bool,
    pyi: bool,
    py36: bool,
    skip_string_normalization: bool,
    quiet: bool,
    verbose: bool,
    include: str,
    exclude: str,
    src: Tuple[str],
    config: Optional[str],
) -> None:
    """The uncompromising code formatter."""
    write_back = WriteBack.from_configuration(check=check, diff=diff)
    # --target-version and the deprecated --py36 are mutually exclusive
    # ways of pinning the supported Python versions.
    if target_version:
        if py36:
            err(f"Cannot use both --target-version and --py36")
            ctx.exit(2)
        else:
            versions = set(target_version)
    elif py36:
        err(
            "--py36 is deprecated and will be removed in a future version. "
            "Use --target-version py36 instead."
        )
        versions = PY36_VERSIONS
    else:
        # We'll autodetect later.
        versions = set()
    mode = FileMode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        string_normalization=not skip_string_normalization,
    )
    if config and verbose:
        out(f"Using configuration from {config}.", bold=False, fg="blue")
    # -c/--code formats the given string and exits without touching files.
    if code is not None:
        print(format_str(code, mode=mode))
        ctx.exit(0)
    try:
        include_regex = re_compile_maybe_verbose(include)
    except re.error:
        err(f"Invalid regular expression for include given: {include!r}")
        ctx.exit(2)
    try:
        exclude_regex = re_compile_maybe_verbose(exclude)
    except re.error:
        err(f"Invalid regular expression for exclude given: {exclude!r}")
        ctx.exit(2)
    report = Report(check=check, quiet=quiet, verbose=verbose)
    root = find_project_root(src)
    sources: Set[Path] = set()
    path_empty(src, quiet, verbose, ctx)
    # Expand directories via the include/exclude regexes; explicitly named
    # files are always taken, regardless of extension.
    for s in src:
        p = Path(s)
        if p.is_dir():
            sources.update(
                gen_python_files_in_dir(p, root, include_regex, exclude_regex, report)
            )
        elif p.is_file() or s == "-":
            # if a file was explicitly given, we don't care about its extension
            sources.add(p)
        else:
            err(f"invalid path: {s}")
    if len(sources) == 0:
        if verbose or not quiet:
            out("No Python files are present to be formatted. Nothing to do 😴")
        ctx.exit(0)

    # One file is formatted in-process; more than one fans out to workers.
    if len(sources) == 1:
        reformat_one(
            src=sources.pop(),
            fast=fast,
            write_back=write_back,
            mode=mode,
            report=report,
        )
    else:
        reformat_many(
            sources=sources, fast=fast, write_back=write_back, mode=mode, report=report
        )

    if verbose or not quiet:
        out("Oh no! 💥 💔 💥" if report.return_code else "All done! ✨ 🍰 ✨")
        click.secho(str(report), err=True)
    ctx.exit(report.return_code)
471
472
def path_empty(src: Tuple[str], quiet: bool, verbose: bool, ctx: click.Context) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    if src:
        return
    if verbose or not quiet:
        out("No Path provided. Nothing to do 😴")
    ctx.exit(0)
481
482
def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: FileMode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO
        if not src.is_file() and str(src) == "-":
            # "-" means stdin/stdout; no caching applies there.
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            # Diff mode never consults or updates the cache.
            if write_back != WriteBack.DIFF:
                cache = read_cache(mode)
                res_src = src.resolve()
                if res_src in cache and cache[res_src] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            # Record in the cache only what we know is now well formatted:
            # files just written back, or check-mode files needing no change.
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        report.failed(src, str(exc))
514
515
def reformat_many(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: FileMode,
    report: "Report",
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor.

    `fast`, `write_back`, and `mode` are forwarded to the per-file
    formatting routine scheduled by :func:`schedule_formatting`; outcomes
    are recorded on `report`.
    """
    loop = asyncio.get_event_loop()
    # os.cpu_count() is documented to return None when the CPU count is
    # undeterminable; without the `or 1` fallback, min(None, 61) below
    # would raise TypeError on such systems.
    worker_count = os.cpu_count() or 1
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        worker_count = min(worker_count, 61)
    executor = ProcessPoolExecutor(max_workers=worker_count)
    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
            )
        )
    finally:
        # Always tear down the loop and worker pool, even on error/cancel.
        shutdown(loop)
        executor.shutdown()
545
546
async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: FileMode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: Executor,
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    cache: Cache = {}
    # Diff mode bypasses the cache entirely.
    if write_back != WriteBack.DIFF:
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back == WriteBack.DIFF:
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending: Iterable[asyncio.Future] = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    # Drain tasks as they finish so per-file results are reported promptly.
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        # Wait for cancelled tasks to actually finish before writing the cache.
        await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    if sources_to_cache:
        write_cache(cache, sources_to_cache, mode)
616
617
def format_file_in_place(
    src: Path,
    fast: bool,
    mode: FileMode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = evolve(mode, is_pyi=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False

    # Compare against the WriteBack class with `is`, not via the instance
    # (`write_back == write_back.YES`): accessing one enum member through
    # another is deprecated and removed in modern CPython, and identity
    # comparison is the idiomatic enum check.
    if write_back is WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back is WriteBack.DIFF:
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        # NOTE(review): `nullcontext` is not among the imports visible in
        # this chunk — presumably defined/imported elsewhere in the file;
        # verify before shipping.
        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f.write(diff_contents)
            # detach() flushes and releases stdout's buffer without closing it.
            f.detach()

    return True
662
663
def format_stdin_to_stdout(
    fast: bool, *, write_back: WriteBack = WriteBack.NO, mode: FileMode
) -> bool:
    """Format file on stdin. Return True if changed.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()
    src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    # Pre-seed dst so the finally block can always emit something, even
    # when formatting raises NothingChanged (or any other exception).
    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True

    except NothingChanged:
        return False

    finally:
        # Output happens here so both the changed and unchanged paths
        # write through a correctly-encoded wrapper around stdout.
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            f.write(dst)
        elif write_back == WriteBack.DIFF:
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            f.write(diff(src, dst, src_name, dst_name))
        # detach() flushes without closing the underlying stdout buffer.
        f.detach()
695
696
def format_file_contents(
    src_contents: str, *, fast: bool, mode: FileMode
) -> FileContent:
    """Return the reformatted contents of a file.

    Raises :exc:`NothingChanged` when the input is blank or already
    formatted. Unless `fast` is given, verify the result via
    :func:`assert_equivalent` and :func:`assert_stable`.
    `mode` is passed to :func:`format_str`.
    """
    # Whitespace-only input counts as "nothing to do".
    if not src_contents.strip():
        raise NothingChanged

    dst_contents = format_str(src_contents, mode=mode)
    if dst_contents == src_contents:
        raise NothingChanged

    if fast:
        return dst_contents

    # Safety net: prove the rewrite preserved the AST and is stable under
    # a second formatting pass.
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)
    return dst_contents
717
718
def format_str(src_contents: str, *, mode: FileMode) -> FileContent:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.
    """
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    future_imports = get_future_imports(src_node)
    # Explicit target versions win; otherwise sniff them from the syntax used.
    if mode.target_versions:
        versions = mode.target_versions
    else:
        versions = detect_target_versions(src_node)
    normalize_fmt_off(src_node)
    lines = LineGenerator(
        remove_u_prefix="unicode_literals" in future_imports
        or supports_feature(versions, Feature.UNICODE_LITERALS),
        is_pyi=mode.is_pyi,
        normalize_strings=mode.string_normalization,
    )
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line()
    after = 0
    # Trailing commas may only be emitted when every target version allows them.
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    # Emit each logical line, surrounded by the blank lines the tracker asks
    # for, splitting any line that exceeds the configured length.
    for current_line in lines.visit(src_node):
        for _ in range(after):
            dst_contents.append(str(empty_line))
        before, after = elt.maybe_empty_lines(current_line)
        for _ in range(before):
            dst_contents.append(str(empty_line))
        for line in split_line(
            current_line, line_length=mode.line_length, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)
758
759
def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    buf = io.BytesIO(src)
    # Honor any PEP 263 coding cookie when decoding.
    encoding, first_lines = tokenize.detect_encoding(buf.readline)
    if not first_lines:
        return "", encoding, "\n"

    # The first physical line decides which newline style we report.
    newline = "\r\n" if first_lines[0].endswith(b"\r\n") else "\n"
    buf.seek(0)
    wrapper = io.TextIOWrapper(buf, encoding)
    try:
        return wrapper.read(), encoding, newline
    finally:
        wrapper.close()
775
776
def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]:
    """Return the lib2to3 grammars to try, most specific first, for `target_versions`."""
    if not target_versions:
        # No target_version specified, so try all grammars.
        return [
            # Python 3.7+
            pygram.python_grammar_no_print_statement_no_exec_statement_async_keywords,
            # Python 3.0-3.6
            pygram.python_grammar_no_print_statement_no_exec_statement,
            # Python 2.7 with future print_function import
            pygram.python_grammar_no_print_statement,
            # Python 2.7
            pygram.python_grammar,
        ]
    elif all(version.is_python2() for version in target_versions):
        # Python 2-only code, so try Python 2 grammars.
        return [
            # Python 2.7 with future print_function import
            pygram.python_grammar_no_print_statement,
            # Python 2.7
            pygram.python_grammar,
        ]
    else:
        # Python 3-compatible code, so only try Python 3 grammar.
        grammars = []
        # If we have to parse both, try to parse async as a keyword first
        if not supports_feature(target_versions, Feature.ASYNC_IDENTIFIERS):
            # Python 3.7+
            grammars.append(
                pygram.python_grammar_no_print_statement_no_exec_statement_async_keywords  # noqa: B950
            )
        if not supports_feature(target_versions, Feature.ASYNC_KEYWORDS):
            # Python 3.0-3.6
            grammars.append(pygram.python_grammar_no_print_statement_no_exec_statement)
        # At least one of the above branches must have been taken, because every Python
        # version has exactly one of the two 'ASYNC_*' flags
        return grammars
813
814
def lib2to3_parse(src_txt: str, target_versions: Iterable[TargetVersion] = ()) -> Node:
    """Given a string with source, return the lib2to3 Node."""
    if not src_txt.endswith("\n"):
        src_txt += "\n"

    result = None
    last_exc = None
    for grammar in get_grammars(set(target_versions)):
        parser = driver.Driver(grammar, pytree.convert)
        try:
            result = parser.parse_string(src_txt, True)
        except ParseError as pe:
            # Remember a readable error; a later grammar may still succeed.
            lineno, column = pe.context[1]
            src_lines = src_txt.splitlines()
            try:
                faulty_line = src_lines[lineno - 1]
            except IndexError:
                faulty_line = "<line number missing in source>"
            last_exc = InvalidInput(f"Cannot parse: {lineno}:{column}: {faulty_line}")
        else:
            break
    else:
        # Every grammar failed; surface the error from the last attempt.
        raise last_exc from None

    if isinstance(result, Leaf):
        # A bare leaf still needs a file_input root node.
        result = Node(syms.file_input, [result])
    return result
840
841
def lib2to3_unparse(node: Node) -> str:
    """Return the source code that `node` stringifies back into."""
    return str(node)
846
847
# Generic result type yielded by Visitor subclasses (see Visitor[T] below).
T = TypeVar("T")
849
850
class Visitor(Generic[T]):
    """Basic lib2to3 visitor that yields things of type `T` on `visit()`."""

    def visit(self, node: LN) -> Iterator[T]:
        """Visit `node` and its children, yielding objects of type `T`.

        Dispatches on `node.type` to a `visit_*()` method named after the
        token (`visit_INDENT` for Leaf objects) or symbol (`visit_simple_stmt`
        for Node objects). Falls back to `visit_default()` when no dedicated
        method exists.
        """
        if node.type >= 256:
            # Symbol (non-terminal) types live above the token range.
            name = type_repr(node.type)
        else:
            name = token.tok_name[node.type]
        visitor = getattr(self, f"visit_{name}", self.visit_default)
        yield from visitor(node)

    def visit_default(self, node: LN) -> Iterator[T]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Node):
            yield from itertools.chain.from_iterable(
                self.visit(child) for child in node.children
            )
875
876
@dataclass
class DebugVisitor(Visitor[T]):
    """Visitor that pretty-prints the visited tree instead of transforming it."""

    # Current indentation level of the printed output (2 spaces per level).
    tree_depth: int = 0

    def visit_default(self, node: LN) -> Iterator[T]:
        """Print `node` in an XML-ish indented format via the `out` helper."""
        indent = " " * (2 * self.tree_depth)
        if isinstance(node, Node):
            # Inner nodes: print an opening tag, recurse, print a closing tag.
            _type = type_repr(node.type)
            out(f"{indent}{_type}", fg="yellow")
            self.tree_depth += 1
            for child in node.children:
                yield from self.visit(child)

            self.tree_depth -= 1
            out(f"{indent}/{_type}", fg="yellow", bold=False)
        else:
            # Leaves: print type, optional prefix (whitespace/comments), value.
            _type = token.tok_name.get(node.type, str(node.type))
            out(f"{indent}{_type}", fg="blue", nl=False)
            if node.prefix:
                # We don't have to handle prefixes for `Node` objects since
                # that delegates to the first child anyway.
                out(f" {node.prefix!r}", fg="green", bold=False, nl=False)
            out(f" {node.value!r}", fg="blue", bold=False)

    @classmethod
    def show(cls, code: Union[str, Leaf, Node]) -> None:
        """Pretty-print the lib2to3 AST of a given string of `code`.

        Convenience method for debugging.
        """
        v: DebugVisitor[None] = DebugVisitor()
        if isinstance(code, str):
            code = lib2to3_parse(code)
        list(v.visit(code))
911
912
# Token types that carry no visible content of their own.
WHITESPACE = {token.DEDENT, token.INDENT, token.NEWLINE}
# Compound statements whose bodies are suites.
STATEMENT = {
    syms.if_stmt,
    syms.while_stmt,
    syms.for_stmt,
    syms.try_stmt,
    syms.except_clause,
    syms.with_stmt,
    syms.funcdef,
    syms.classdef,
}
# Synthetic token type for comments that must be emitted on their own line.
STANDALONE_COMMENT = 153
token.tok_name[STANDALONE_COMMENT] = "STANDALONE_COMMENT"
LOGIC_OPERATORS = {"and", "or"}
COMPARATORS = {
    token.LESS,
    token.GREATER,
    token.EQEQUAL,
    token.NOTEQUAL,
    token.LESSEQUAL,
    token.GREATEREQUAL,
}
MATH_OPERATORS = {
    token.VBAR,
    token.CIRCUMFLEX,
    token.AMPER,
    token.LEFTSHIFT,
    token.RIGHTSHIFT,
    token.PLUS,
    token.MINUS,
    token.STAR,
    token.SLASH,
    token.DOUBLESLASH,
    token.PERCENT,
    token.AT,
    token.TILDE,
    token.DOUBLESTAR,
}
STARS = {token.STAR, token.DOUBLESTAR}
# `*`, `**` and `/` when used as argument-list markers rather than operators.
VARARGS_SPECIALS = STARS | {token.SLASH}
VARARGS_PARENTS = {
    syms.arglist,
    syms.argument,  # double star in arglist
    syms.trailer,  # single argument to call
    syms.typedargslist,
    syms.varargslist,  # lambdas
}
UNPACKING_PARENTS = {
    syms.atom,  # single element of a list or set literal
    syms.dictsetmaker,
    syms.listmaker,
    syms.testlist_gexp,
    syms.testlist_star_expr,
}
# Symbol types that can appear inside a subscript and make it "complex".
TEST_DESCENDANTS = {
    syms.test,
    syms.lambdef,
    syms.or_test,
    syms.and_test,
    syms.not_test,
    syms.comparison,
    syms.star_expr,
    syms.expr,
    syms.xor_expr,
    syms.and_expr,
    syms.shift_expr,
    syms.arith_expr,
    syms.trailer,
    syms.term,
    syms.power,
}
ASSIGNMENTS = {
    "=",
    "+=",
    "-=",
    "*=",
    "@=",
    "/=",
    "%=",
    "&=",
    "|=",
    "^=",
    "<<=",
    ">>=",
    "**=",
    "//=",
}
# Split-delimiter priorities: a higher value means a more preferred split
# point when a line is too long.
COMPREHENSION_PRIORITY = 20
COMMA_PRIORITY = 18
TERNARY_PRIORITY = 16
LOGIC_PRIORITY = 14
STRING_PRIORITY = 12
COMPARATOR_PRIORITY = 10
# Binary operator split priorities, loosest-binding (|) to tightest (**).
MATH_PRIORITIES = {
    token.VBAR: 9,
    token.CIRCUMFLEX: 8,
    token.AMPER: 7,
    token.LEFTSHIFT: 6,
    token.RIGHTSHIFT: 6,
    token.PLUS: 5,
    token.MINUS: 5,
    token.STAR: 4,
    token.SLASH: 4,
    token.DOUBLESLASH: 4,
    token.PERCENT: 4,
    token.AT: 4,
    token.TILDE: 3,
    token.DOUBLESTAR: 2,
}
DOT_PRIORITY = 1
1023
1024
@dataclass
class BracketTracker:
    """Keeps track of brackets on a line."""

    # Current bracket nesting depth while marking leaves left to right.
    depth: int = 0
    # Maps (depth, closing-bracket token type) to the opening bracket leaf.
    bracket_match: Dict[Tuple[Depth, NodeType], Leaf] = Factory(dict)
    # Maps id(leaf) to that leaf's split priority (only depth-0 delimiters).
    delimiters: Dict[LeafID, Priority] = Factory(dict)
    # The last leaf passed to `mark()`, if any.
    previous: Optional[Leaf] = None
    # Depths at which we artificially deepened `for ... in` / `lambda ... :`
    # spans so commas inside them don't register as split points.
    _for_loop_depths: List[int] = Factory(list)
    _lambda_argument_depths: List[int] = Factory(list)

    def mark(self, leaf: Leaf) -> None:
        """Mark `leaf` with bracket-related metadata. Keep track of delimiters.

        All leaves receive an int `bracket_depth` field that stores how deep
        within brackets a given leaf is. 0 means there are no enclosing brackets
        that started on this line.

        If a leaf is itself a closing bracket, it receives an `opening_bracket`
        field that it forms a pair with. This is a one-directional link to
        avoid reference cycles.

        If a leaf is a delimiter (a token on which Black can split the line if
        needed) and it's on depth 0, its `id()` is stored in the tracker's
        `delimiters` field.
        """
        if leaf.type == token.COMMENT:
            return

        # Order matters: close the artificial for/lambda spans before
        # processing this leaf, and open new ones only after it.
        self.maybe_decrement_after_for_loop_variable(leaf)
        self.maybe_decrement_after_lambda_arguments(leaf)
        if leaf.type in CLOSING_BRACKETS:
            self.depth -= 1
            opening_bracket = self.bracket_match.pop((self.depth, leaf.type))
            leaf.opening_bracket = opening_bracket
        leaf.bracket_depth = self.depth
        if self.depth == 0:
            delim = is_split_before_delimiter(leaf, self.previous)
            if delim and self.previous is not None:
                self.delimiters[id(self.previous)] = delim
            else:
                delim = is_split_after_delimiter(leaf, self.previous)
                if delim:
                    self.delimiters[id(leaf)] = delim
        if leaf.type in OPENING_BRACKETS:
            self.bracket_match[self.depth, BRACKET[leaf.type]] = leaf
            self.depth += 1
        self.previous = leaf
        self.maybe_increment_lambda_arguments(leaf)
        self.maybe_increment_for_loop_variable(leaf)

    def any_open_brackets(self) -> bool:
        """Return True if there is an yet unmatched open bracket on the line."""
        return bool(self.bracket_match)

    def max_delimiter_priority(self, exclude: Iterable[LeafID] = ()) -> Priority:
        """Return the highest priority of a delimiter found on the line.

        Values are consistent with what `is_split_*_delimiter()` return.
        Raises ValueError on no delimiters.
        """
        return max(v for k, v in self.delimiters.items() if k not in exclude)

    def delimiter_count_with_priority(self, priority: Priority = 0) -> int:
        """Return the number of delimiters with the given `priority`.

        If no `priority` is passed, defaults to max priority on the line.
        """
        if not self.delimiters:
            return 0

        priority = priority or self.max_delimiter_priority()
        return sum(1 for p in self.delimiters.values() if p == priority)

    def maybe_increment_for_loop_variable(self, leaf: Leaf) -> bool:
        """In a for loop, or comprehension, the variables are often unpacks.

        To avoid splitting on the comma in this situation, increase the depth of
        tokens between `for` and `in`.
        """
        if leaf.type == token.NAME and leaf.value == "for":
            self.depth += 1
            self._for_loop_depths.append(self.depth)
            return True

        return False

    def maybe_decrement_after_for_loop_variable(self, leaf: Leaf) -> bool:
        """See `maybe_increment_for_loop_variable` above for explanation."""
        if (
            self._for_loop_depths
            and self._for_loop_depths[-1] == self.depth
            and leaf.type == token.NAME
            and leaf.value == "in"
        ):
            self.depth -= 1
            self._for_loop_depths.pop()
            return True

        return False

    def maybe_increment_lambda_arguments(self, leaf: Leaf) -> bool:
        """In a lambda expression, there might be more than one argument.

        To avoid splitting on the comma in this situation, increase the depth of
        tokens between `lambda` and `:`.
        """
        if leaf.type == token.NAME and leaf.value == "lambda":
            self.depth += 1
            self._lambda_argument_depths.append(self.depth)
            return True

        return False

    def maybe_decrement_after_lambda_arguments(self, leaf: Leaf) -> bool:
        """See `maybe_increment_lambda_arguments` above for explanation."""
        if (
            self._lambda_argument_depths
            and self._lambda_argument_depths[-1] == self.depth
            and leaf.type == token.COLON
        ):
            self.depth -= 1
            self._lambda_argument_depths.pop()
            return True

        return False

    def get_open_lsqb(self) -> Optional[Leaf]:
        """Return the most recent opening square bracket (if any)."""
        return self.bracket_match.get((self.depth - 1, token.RSQB))
1155
1156
@dataclass
class Line:
    """Holds leaves and comments. Can be printed with `str(line)`."""

    # Indentation level of this line (in 4-space units).
    depth: int = 0
    # The leaves making up the line's code, in order.
    leaves: List[Leaf] = Factory(list)
    comments: Dict[LeafID, List[Leaf]] = Factory(dict)  # keys ordered like `leaves`
    bracket_tracker: BracketTracker = Factory(BracketTracker)
    # True when this line lives inside brackets opened on a previous line.
    inside_brackets: bool = False
    # True when the line must be split at every top-level delimiter.
    should_explode: bool = False

    def append(self, leaf: Leaf, preformatted: bool = False) -> None:
        """Add a new `leaf` to the end of the line.

        Unless `preformatted` is True, the `leaf` will receive a new consistent
        whitespace prefix and metadata applied by :class:`BracketTracker`.
        Trailing commas are maybe removed, unpacked for loop variables are
        demoted from being delimiters.

        Inline comments are put aside.
        """
        has_value = leaf.type in BRACKETS or bool(leaf.value.strip())
        if not has_value:
            return

        if token.COLON == leaf.type and self.is_class_paren_empty:
            # Drop the useless `()` of `class C():` before appending `:`.
            del self.leaves[-2:]
        if self.leaves and not preformatted:
            # Note: at this point leaf.prefix should be empty except for
            # imports, for which we only preserve newlines.
            leaf.prefix += whitespace(
                leaf, complex_subscript=self.is_complex_subscript(leaf)
            )
        if self.inside_brackets or not preformatted:
            self.bracket_tracker.mark(leaf)
            self.maybe_remove_trailing_comma(leaf)
        if not self.append_comment(leaf):
            self.leaves.append(leaf)

    def append_safe(self, leaf: Leaf, preformatted: bool = False) -> None:
        """Like :func:`append()` but disallow invalid standalone comment structure.

        Raises ValueError when any `leaf` is appended after a standalone comment
        or when a standalone comment is not the first leaf on the line.
        """
        if self.bracket_tracker.depth == 0:
            if self.is_comment:
                raise ValueError("cannot append to standalone comments")

            if self.leaves and leaf.type == STANDALONE_COMMENT:
                raise ValueError(
                    "cannot append standalone comments to a populated line"
                )

        self.append(leaf, preformatted=preformatted)

    @property
    def is_comment(self) -> bool:
        """Is this line a standalone comment?"""
        return len(self.leaves) == 1 and self.leaves[0].type == STANDALONE_COMMENT

    @property
    def is_decorator(self) -> bool:
        """Is this line a decorator?"""
        return bool(self) and self.leaves[0].type == token.AT

    @property
    def is_import(self) -> bool:
        """Is this an import line?"""
        return bool(self) and is_import(self.leaves[0])

    @property
    def is_class(self) -> bool:
        """Is this line a class definition?"""
        return (
            bool(self)
            and self.leaves[0].type == token.NAME
            and self.leaves[0].value == "class"
        )

    @property
    def is_stub_class(self) -> bool:
        """Is this line a class definition with a body consisting only of "..."?"""
        return self.is_class and self.leaves[-3:] == [
            Leaf(token.DOT, ".") for _ in range(3)
        ]

    @property
    def is_collection_with_optional_trailing_comma(self) -> bool:
        """Is this line a collection literal with a trailing comma that's optional?

        Note that the trailing comma in a 1-tuple is not optional.
        """
        if not self.leaves or len(self.leaves) < 4:
            return False
        # Look for and address a trailing colon.
        if self.leaves[-1].type == token.COLON:
            closer = self.leaves[-2]
            close_index = -2
        else:
            closer = self.leaves[-1]
            close_index = -1
        if closer.type not in CLOSING_BRACKETS or self.inside_brackets:
            return False
        if closer.type == token.RPAR:
            # Tuples require an extra check, because if there's only
            # one element in the tuple removing the comma unmakes the
            # tuple.
            #
            # We also check for parens before looking for the trailing
            # comma because in some cases (eg assigning a dict
            # literal) the literal gets wrapped in temporary parens
            # during parsing. This case is covered by the
            # collections.py test data.
            opener = closer.opening_bracket
            for _open_index, leaf in enumerate(self.leaves):
                if leaf is opener:
                    break
            else:
                # Couldn't find the matching opening paren, play it safe.
                return False
            commas = 0
            comma_depth = self.leaves[close_index - 1].bracket_depth
            for leaf in self.leaves[_open_index + 1 : close_index]:
                if leaf.bracket_depth == comma_depth and leaf.type == token.COMMA:
                    commas += 1
            if commas > 1:
                # We haven't looked yet for the trailing comma because
                # we might also have caught noop parens.
                return self.leaves[close_index - 1].type == token.COMMA
            elif commas == 1:
                return False  # it's either a one-tuple or didn't have a trailing comma
            if self.leaves[close_index - 1].type in CLOSING_BRACKETS:
                # Noop parens around another bracketed literal: look one
                # bracket further in.
                close_index -= 1
                closer = self.leaves[close_index]
                if closer.type == token.RPAR:
                    # TODO: this is a gut feeling. Will we ever see this?
                    return False
        if self.leaves[close_index - 1].type != token.COMMA:
            return False
        return True

    @property
    def is_def(self) -> bool:
        """Is this a function definition? (Also returns True for async defs.)"""
        try:
            first_leaf = self.leaves[0]
        except IndexError:
            return False

        try:
            second_leaf: Optional[Leaf] = self.leaves[1]
        except IndexError:
            second_leaf = None
        return (first_leaf.type == token.NAME and first_leaf.value == "def") or (
            first_leaf.type == token.ASYNC
            and second_leaf is not None
            and second_leaf.type == token.NAME
            and second_leaf.value == "def"
        )

    @property
    def is_class_paren_empty(self) -> bool:
        """Is this a class with no base classes but using parentheses?

        Those are unnecessary and should be removed.
        """
        return (
            bool(self)
            and len(self.leaves) == 4
            and self.is_class
            and self.leaves[2].type == token.LPAR
            and self.leaves[2].value == "("
            and self.leaves[3].type == token.RPAR
            and self.leaves[3].value == ")"
        )

    @property
    def is_triple_quoted_string(self) -> bool:
        """Is the line a triple quoted string?"""
        return (
            bool(self)
            and self.leaves[0].type == token.STRING
            and self.leaves[0].value.startswith(('"""', "'''"))
        )

    def contains_standalone_comments(self, depth_limit: int = sys.maxsize) -> bool:
        """If so, needs to be split before emitting."""
        for leaf in self.leaves:
            if leaf.type == STANDALONE_COMMENT:
                if leaf.bracket_depth <= depth_limit:
                    return True
        return False

    def contains_uncollapsable_type_comments(self) -> bool:
        """Return True if a `# type:` comment here would move if the line
        were collapsed onto one physical line."""
        ignored_ids = set()
        try:
            last_leaf = self.leaves[-1]
            ignored_ids.add(id(last_leaf))
            if last_leaf.type == token.COMMA or (
                last_leaf.type == token.RPAR and not last_leaf.value
            ):
                # When trailing commas or optional parens are inserted by Black for
                # consistency, comments after the previous last element are not moved
                # (they don't have to, rendering will still be correct). So we ignore
                # trailing commas and invisible.
                last_leaf = self.leaves[-2]
                ignored_ids.add(id(last_leaf))
        except IndexError:
            return False

        # A type comment is uncollapsable if it is attached to a leaf
        # that isn't at the end of the line (since that could cause it
        # to get associated to a different argument) or if there are
        # comments before it (since that could cause it to get hidden
        # behind a comment.
        comment_seen = False
        for leaf_id, comments in self.comments.items():
            for comment in comments:
                if is_type_comment(comment):
                    if leaf_id not in ignored_ids or comment_seen:
                        return True

                comment_seen = True

        return False

    def contains_unsplittable_type_ignore(self) -> bool:
        """Return True if a trailing `# type: ignore` forbids splitting."""
        if not self.leaves:
            return False

        # If a 'type: ignore' is attached to the end of a line, we
        # can't split the line, because we can't know which of the
        # subexpressions the ignore was meant to apply to.
        #
        # We only want this to apply to actual physical lines from the
        # original source, though: we don't want the presence of a
        # 'type: ignore' at the end of a multiline expression to
        # justify pushing it all onto one line. Thus we
        # (unfortunately) need to check the actual source lines and
        # only report an unsplittable 'type: ignore' if this line was
        # one line in the original code.
        if self.leaves[0].lineno == self.leaves[-1].lineno:
            for comment in self.comments.get(id(self.leaves[-1]), []):
                if is_type_comment(comment, " ignore"):
                    return True

        return False

    def contains_multiline_strings(self) -> bool:
        """Return True if any leaf is a multiline string literal."""
        for leaf in self.leaves:
            if is_multiline_string(leaf):
                return True

        return False

    def maybe_remove_trailing_comma(self, closing: Leaf) -> bool:
        """Remove trailing comma if there is one and it's safe."""
        if not (self.leaves and self.leaves[-1].type == token.COMMA):
            return False
        # We remove trailing commas only in the case of importing a
        # single name from a module.
        if not (
            self.leaves
            and self.is_import
            and len(self.leaves) > 4
            and self.leaves[-1].type == token.COMMA
            and closing.type in CLOSING_BRACKETS
            and self.leaves[-4].type == token.NAME
            and (
                # regular `from foo import bar,`
                self.leaves[-4].value == "import"
                # `from foo import (bar as baz,)
                or (
                    len(self.leaves) > 6
                    and self.leaves[-6].value == "import"
                    and self.leaves[-3].value == "as"
                )
                # `from foo import bar as baz,`
                or (
                    len(self.leaves) > 5
                    and self.leaves[-5].value == "import"
                    and self.leaves[-3].value == "as"
                )
            )
            and closing.type == token.RPAR
        ):
            return False

        self.remove_trailing_comma()
        return True

    def append_comment(self, comment: Leaf) -> bool:
        """Add an inline or standalone comment to the line.

        Returns True when the comment was consumed (stored in `self.comments`),
        False when the caller should append it as a regular leaf instead.
        """
        if (
            comment.type == STANDALONE_COMMENT
            and self.bracket_tracker.any_open_brackets()
        ):
            comment.prefix = ""
            return False

        if comment.type != token.COMMENT:
            return False

        if not self.leaves:
            # A comment with no code before it must stand alone.
            comment.type = STANDALONE_COMMENT
            comment.prefix = ""
            return False

        last_leaf = self.leaves[-1]
        if (
            last_leaf.type == token.RPAR
            and not last_leaf.value
            and last_leaf.parent
            and len(list(last_leaf.parent.leaves())) <= 3
            and not is_type_comment(comment)
        ):
            # Comments on an optional parens wrapping a single leaf should belong to
            # the wrapped node except if it's a type comment. Pinning the comment like
            # this avoids unstable formatting caused by comment migration.
            if len(self.leaves) < 2:
                comment.type = STANDALONE_COMMENT
                comment.prefix = ""
                return False
            last_leaf = self.leaves[-2]
        self.comments.setdefault(id(last_leaf), []).append(comment)
        return True

    def comments_after(self, leaf: Leaf) -> List[Leaf]:
        """Generate comments that should appear directly after `leaf`."""
        return self.comments.get(id(leaf), [])

    def remove_trailing_comma(self) -> None:
        """Remove the trailing comma and moves the comments attached to it."""
        trailing_comma = self.leaves.pop()
        trailing_comma_comments = self.comments.pop(id(trailing_comma), [])
        self.comments.setdefault(id(self.leaves[-1]), []).extend(
            trailing_comma_comments
        )

    def is_complex_subscript(self, leaf: Leaf) -> bool:
        """Return True iff `leaf` is part of a slice with non-trivial exprs."""
        open_lsqb = self.bracket_tracker.get_open_lsqb()
        if open_lsqb is None:
            return False

        subscript_start = open_lsqb.next_sibling

        if isinstance(subscript_start, Node):
            if subscript_start.type == syms.listmaker:
                # A list literal, not a subscript.
                return False

            if subscript_start.type == syms.subscriptlist:
                subscript_start = child_towards(subscript_start, leaf)
        return subscript_start is not None and any(
            n.type in TEST_DESCENDANTS for n in subscript_start.pre_order()
        )

    def __str__(self) -> str:
        """Render the line."""
        if not self:
            return "\n"

        indent = "    " * self.depth
        leaves = iter(self.leaves)
        first = next(leaves)
        res = f"{first.prefix}{indent}{first.value}"
        for leaf in leaves:
            res += str(leaf)
        for comment in itertools.chain.from_iterable(self.comments.values()):
            res += str(comment)
        return res + "\n"

    def __bool__(self) -> bool:
        """Return True if the line has leaves or comments."""
        return bool(self.leaves or self.comments)
1533
1534
@dataclass
class EmptyLineTracker:
    """Provides a stateful method that returns the number of potential extra
    empty lines needed before and after the currently processed line.

    Note: this tracker works on lines that haven't been split yet. It assumes
    the prefix of the first leaf consists of optional newlines. Those newlines
    are consumed by `maybe_empty_lines()` and included in the computation.
    """

    # True when formatting a .pyi stub file (tighter blank-line rules).
    is_pyi: bool = False
    # The previously emitted line, if any.
    previous_line: Optional[Line] = None
    # Blank lines already requested after the previous line.
    previous_after: int = 0
    # Depths of `def`/`class` lines still "open" above the current line.
    previous_defs: List[int] = Factory(list)

    def maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
        """Return the number of extra empty lines before and after the `current_line`.

        This is for separating `def`, `async def` and `class` with extra empty
        lines (two on module-level).
        """
        before, after = self._maybe_empty_lines(current_line)
        before = (
            # Black should not insert empty lines at the beginning
            # of the file
            0
            if self.previous_line is None
            else before - self.previous_after
        )
        self.previous_after = after
        self.previous_line = current_line
        return before, after

    def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
        """Compute (before, after) without updating the tracker state."""
        max_allowed = 1
        if current_line.depth == 0:
            max_allowed = 1 if self.is_pyi else 2
        if current_line.leaves:
            # Consume the first leaf's extra newlines.
            first_leaf = current_line.leaves[0]
            before = first_leaf.prefix.count("\n")
            before = min(before, max_allowed)
            first_leaf.prefix = ""
        else:
            before = 0
        depth = current_line.depth
        while self.previous_defs and self.previous_defs[-1] >= depth:
            # Leaving the body of a def/class: force a separating blank line.
            self.previous_defs.pop()
            if self.is_pyi:
                before = 0 if depth else 1
            else:
                before = 1 if depth else 2
        if current_line.is_decorator or current_line.is_def or current_line.is_class:
            return self._maybe_empty_lines_for_class_or_def(current_line, before)

        if (
            self.previous_line
            and self.previous_line.is_import
            and not current_line.is_import
            and depth == self.previous_line.depth
        ):
            # Separate a block of imports from the code that follows it.
            return (before or 1), 0

        if (
            self.previous_line
            and self.previous_line.is_class
            and current_line.is_triple_quoted_string
        ):
            # One blank line after a class docstring.
            return before, 1

        return before, 0

    def _maybe_empty_lines_for_class_or_def(
        self, current_line: Line, before: int
    ) -> Tuple[int, int]:
        """Blank-line rules when `current_line` starts a decorator/def/class."""
        if not current_line.is_decorator:
            self.previous_defs.append(current_line.depth)
        if self.previous_line is None:
            # Don't insert empty lines before the first line in the file.
            return 0, 0

        if self.previous_line.is_decorator:
            return 0, 0

        if self.previous_line.depth < current_line.depth and (
            self.previous_line.is_class or self.previous_line.is_def
        ):
            return 0, 0

        if (
            self.previous_line.is_comment
            and self.previous_line.depth == current_line.depth
            and before == 0
        ):
            return 0, 0

        if self.is_pyi:
            if self.previous_line.depth > current_line.depth:
                newlines = 1
            elif current_line.is_class or self.previous_line.is_class:
                if current_line.is_stub_class and self.previous_line.is_stub_class:
                    # No blank line between classes with an empty body
                    newlines = 0
                else:
                    newlines = 1
            elif current_line.is_def and not self.previous_line.is_def:
                # Blank line between a block of functions and a block of non-functions
                newlines = 1
            else:
                newlines = 0
        else:
            newlines = 2
        if current_line.depth and newlines:
            # Nested defs get one blank line instead of two.
            newlines -= 1
        return newlines, 0
1650
1651
1652 @dataclass
1653 class LineGenerator(Visitor[Line]):
1654 """Generates reformatted Line objects. Empty lines are not emitted.
1655
1656 Note: destroys the tree it's visiting by mutating prefixes of its leaves
1657 in ways that will no longer stringify to valid Python code on the tree.
1658 """
1659
1660 is_pyi: bool = False
1661 normalize_strings: bool = True
1662 current_line: Line = Factory(Line)
1663 remove_u_prefix: bool = False
1664
1665 def line(self, indent: int = 0) -> Iterator[Line]:
1666 """Generate a line.
1667
1668 If the line is empty, only emit if it makes sense.
1669 If the line is too long, split it first and then generate.
1670
1671 If any lines were generated, set up a new current_line.
1672 """
1673 if not self.current_line:
1674 self.current_line.depth += indent
1675 return # Line is empty, don't emit. Creating a new one unnecessary.
1676
1677 complete_line = self.current_line
1678 self.current_line = Line(depth=complete_line.depth + indent)
1679 yield complete_line
1680
    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`.

        For leaves, first extracts any comments hidden in the prefix, then
        normalizes the leaf's prefix, string style and numeric style before
        appending it to the current line.
        """
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.normalize_strings and node.type == token.STRING:
                normalize_string_prefix(node, remove_u_prefix=self.remove_u_prefix)
                normalize_string_quotes(node)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                # INDENT/DEDENT/NEWLINE are handled by dedicated visitors.
                self.current_line.append(node)
        yield from super().visit_default(node)
1710
1711 def visit_atom(self, node: Node) -> Iterator[Line]:
1712 # Always make parentheses invisible around a single node, because it should
1713 # not be needed (except in the case of yield, where removing the parentheses
1714 # produces a SyntaxError).
1715 if (
1716 len(node.children) == 3
1717 and isinstance(node.children[0], Leaf)
1718 and node.children[0].type == token.LPAR
1719 and isinstance(node.children[2], Leaf)
1720 and node.children[2].type == token.RPAR
1721 and isinstance(node.children[1], Leaf)
1722 and not (
1723 node.children[1].type == token.NAME
1724 and node.children[1].value == "yield"
1725 )
1726 ):
1727 node.children[0].value = ""
1728 node.children[2].value = ""
1729 yield from super().visit_default(node)
1730
1731 def visit_factor(self, node: Node) -> Iterator[Line]:
1732 """Force parentheses between a unary op and a binary power:
1733
1734 -2 ** 8 -> -(2 ** 8)
1735 """
1736 child = node.children[1]
1737 if child.type == syms.power and len(child.children) == 3:
1738 lpar = Leaf(token.LPAR, "(")
1739 rpar = Leaf(token.RPAR, ")")
1740 index = child.remove() or 0
1741 node.insert_child(index, Node(syms.atom, [lpar, child, rpar]))
1742 yield from self.visit_default(node)
1743
    def visit_INDENT(self, node: Node) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)
1749
    def visit_DEDENT(self, node: Node) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments. At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level. Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)
1763
1764 def visit_stmt(
1765 self, node: Node, keywords: Set[str], parens: Set[str]
1766 ) -> Iterator[Line]:
1767 """Visit a statement.
1768
1769 This implementation is shared for `if`, `while`, `for`, `try`, `except`,
1770 `def`, `with`, `class`, `assert` and assignments.
1771
1772 The relevant Python language `keywords` for a given statement will be
1773 NAME leaves within it. This methods puts those on a separate line.
1774
1775 `parens` holds a set of string leaf values immediately after which
1776 invisible parens should be put.
1777 """
1778 normalize_invisible_parens(node, parens_after=parens)
1779 for child in node.children:
1780 if child.type == token.NAME and child.value in keywords: # type: ignore
1781 yield from self.line()
1782
1783 yield from self.visit(child)
1784
1785 def visit_suite(self, node: Node) -> Iterator[Line]:
1786 """Visit a suite."""
1787 if self.is_pyi and is_stub_suite(node):
1788 yield from self.visit(node.children[2])
1789 else:
1790 yield from self.visit_default(node)
1791
1792 def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
1793 """Visit a statement without nested statements."""
1794 is_suite_like = node.parent and node.parent.type in STATEMENT
1795 if is_suite_like:
1796 if self.is_pyi and is_stub_body(node):
1797 yield from self.visit_default(node)
1798 else:
1799 yield from self.line(+1)
1800 yield from self.visit_default(node)
1801 yield from self.line(-1)
1802
1803 else:
1804 if not self.is_pyi or not node.parent or not is_stub_suite(node.parent):
1805 yield from self.line()
1806 yield from self.visit_default(node)
1807
1808 def visit_async_stmt(self, node: Node) -> Iterator[Line]:
1809 """Visit `async def`, `async for`, `async with`."""
1810 yield from self.line()
1811
1812 children = iter(node.children)
1813 for child in children:
1814 yield from self.visit(child)
1815
1816 if child.type == token.ASYNC:
1817 break
1818
1819 internal_stmt = next(children)
1820 for child in internal_stmt.children:
1821 yield from self.visit(child)
1822
1823 def visit_decorators(self, node: Node) -> Iterator[Line]:
1824 """Visit decorators."""
1825 for child in node.children:
1826 yield from self.line()
1827 yield from self.visit(child)
1828
1829 def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
1830 """Remove a semicolon and put the other statement on a separate line."""
1831 yield from self.line()
1832
1833 def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
1834 """End of file. Process outstanding comments and end with a newline."""
1835 yield from self.visit_default(leaf)
1836 yield from self.line()
1837
1838 def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
1839 if not self.current_line.bracket_tracker.any_open_brackets():
1840 yield from self.line()
1841 yield from self.visit_default(leaf)
1842
1843 def __attrs_post_init__(self) -> None:
1844 """You are in a twisty little maze of passages."""
1845 v = self.visit_stmt
1846 Ø: Set[str] = set()
1847 self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
1848 self.visit_if_stmt = partial(
1849 v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
1850 )
1851 self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
1852 self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
1853 self.visit_try_stmt = partial(
1854 v, keywords={"try", "except", "else", "finally"}, parens=Ø
1855 )
1856 self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
1857 self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
1858 self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
1859 self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
1860 self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
1861 self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
1862 self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
1863 self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
1864 self.visit_async_funcdef = self.visit_async_stmt
1865 self.visit_decorated = self.visit_decorators
1866
1867
# Node types whose children form a tuple without explicit brackets.
IMPLICIT_TUPLE = {syms.testlist, syms.testlist_star_expr, syms.exprlist}
# Maps each opening bracket token type to its matching closing bracket type.
BRACKET = {token.LPAR: token.RPAR, token.LSQB: token.RSQB, token.LBRACE: token.RBRACE}
OPENING_BRACKETS = set(BRACKET.keys())
CLOSING_BRACKETS = set(BRACKET.values())
BRACKETS = OPENING_BRACKETS | CLOSING_BRACKETS
# Token types that are never preceded by whitespace.
ALWAYS_NO_SPACE = CLOSING_BRACKETS | {token.COMMA, STANDALONE_COMMENT}
1874
1875
def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str:  # noqa: C901
    """Return whitespace prefix if needed for the given `leaf`.

    The result is one of `NO` (empty), `SPACE`, or `DOUBLESPACE` below, or —
    in two typed-argument special cases — the previous leaf's existing prefix.

    `complex_subscript` signals whether the given leaf is part of a subscription
    which has non-trivial arguments, like arithmetic expressions or function calls.
    """
    NO = ""
    SPACE = " "
    DOUBLESPACE = "  "
    t = leaf.type
    p = leaf.parent
    v = leaf.value
    if t in ALWAYS_NO_SPACE:
        return NO

    if t == token.COMMENT:
        # Inline comments get two spaces before the hash sign.
        return DOUBLESPACE

    assert p is not None, f"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}"
    if t == token.COLON and p.type not in {
        syms.subscript,
        syms.subscriptlist,
        syms.sliceop,
    }:
        return NO

    # First: decide based on what immediately precedes the leaf.
    prev = leaf.prev_sibling
    if not prev:
        # No previous sibling: look at the leaf that textually precedes the
        # parent node instead.
        prevp = preceding_leaf(p)
        if not prevp or prevp.type in OPENING_BRACKETS:
            return NO

        if t == token.COLON:
            if prevp.type == token.COLON:
                return NO

            elif prevp.type != token.COMMA and not complex_subscript:
                return NO

            return SPACE

        if prevp.type == token.EQUAL:
            if prevp.parent:
                if prevp.parent.type in {
                    syms.arglist,
                    syms.argument,
                    syms.parameters,
                    syms.varargslist,
                }:
                    return NO

                elif prevp.parent.type == syms.typedargslist:
                    # A bit hacky: if the equal sign has whitespace, it means we
                    # previously found it's a typed argument.  So, we're using
                    # that, too.
                    return prevp.prefix

        elif prevp.type in VARARGS_SPECIALS:
            if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS):
                return NO

        elif prevp.type == token.COLON:
            if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}:
                return SPACE if complex_subscript else NO

        elif (
            prevp.parent
            and prevp.parent.type == syms.factor
            and prevp.type in MATH_OPERATORS
        ):
            return NO

        elif (
            prevp.type == token.RIGHTSHIFT
            and prevp.parent
            and prevp.parent.type == syms.shift_expr
            and prevp.prev_sibling
            and prevp.prev_sibling.type == token.NAME
            and prevp.prev_sibling.value == "print"  # type: ignore
        ):
            # Python 2 print chevron
            return NO

    elif prev.type in OPENING_BRACKETS:
        return NO

    # Second: decide based on the grammar production the leaf sits in.
    if p.type in {syms.parameters, syms.arglist}:
        # untyped function signatures or calls
        if not prev or prev.type != token.COMMA:
            return NO

    elif p.type == syms.varargslist:
        # lambdas
        if prev and prev.type != token.COMMA:
            return NO

    elif p.type == syms.typedargslist:
        # typed function signatures
        if not prev:
            return NO

        if t == token.EQUAL:
            if prev.type != syms.tname:
                return NO

        elif prev.type == token.EQUAL:
            # A bit hacky: if the equal sign has whitespace, it means we
            # previously found it's a typed argument.  So, we're using that, too.
            return prev.prefix

        elif prev.type != token.COMMA:
            return NO

    elif p.type == syms.tname:
        # type names
        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type != token.COMMA:
                return NO

    elif p.type == syms.trailer:
        # attributes and calls
        if t == token.LPAR or t == token.RPAR:
            return NO

        if not prev:
            if t == token.DOT:
                prevp = preceding_leaf(p)
                if not prevp or prevp.type != token.NUMBER:
                    return NO

            elif t == token.LSQB:
                return NO

        elif prev.type != token.COMMA:
            return NO

    elif p.type == syms.argument:
        # single argument
        if t == token.EQUAL:
            return NO

        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type == token.LPAR:
                return NO

        elif prev.type in {token.EQUAL} | VARARGS_SPECIALS:
            return NO

    elif p.type == syms.decorator:
        # decorators
        return NO

    elif p.type == syms.dotted_name:
        if prev:
            return NO

        prevp = preceding_leaf(p)
        if not prevp or prevp.type == token.AT or prevp.type == token.DOT:
            return NO

    elif p.type == syms.classdef:
        if t == token.LPAR:
            return NO

        if prev and prev.type == token.LPAR:
            return NO

    elif p.type in {syms.subscript, syms.sliceop}:
        # indexing
        if not prev:
            assert p.parent is not None, "subscripts are always parented"
            if p.parent.type == syms.subscriptlist:
                return SPACE

            return NO

        elif not complex_subscript:
            return NO

    elif p.type == syms.atom:
        if prev and t == token.DOT:
            # dots, but not the first one.
            return NO

    elif p.type == syms.dictsetmaker:
        # dict unpacking
        if prev and prev.type == token.DOUBLESTAR:
            return NO

    elif p.type in {syms.factor, syms.star_expr}:
        # unary ops
        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type in OPENING_BRACKETS:
                return NO

            prevp_parent = prevp.parent
            assert prevp_parent is not None
            if prevp.type == token.COLON and prevp_parent.type in {
                syms.subscript,
                syms.sliceop,
            }:
                return NO

            elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument:
                return NO

        elif t in {token.NAME, token.NUMBER, token.STRING}:
            return NO

    elif p.type == syms.import_from:
        if t == token.DOT:
            if prev and prev.type == token.DOT:
                return NO

        elif t == token.NAME:
            if v == "import":
                return SPACE

            if prev and prev.type == token.DOT:
                return NO

    elif p.type == syms.sliceop:
        return NO

    return SPACE
2104
2105
def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]:
    """Return the first leaf that precedes `node`, if any."""
    current = node
    while current:
        sibling = current.prev_sibling
        if sibling:
            # A previous sibling exists: its last leaf is the answer.
            if isinstance(sibling, Leaf):
                return sibling

            try:
                return list(sibling.leaves())[-1]

            except IndexError:
                # The sibling node contains no leaves at all.
                return None

        # No previous sibling at this level: climb up and retry.
        current = current.parent
    return None
2122
2123
def child_towards(ancestor: Node, descendant: LN) -> Optional[LN]:
    """Return the child of `ancestor` that contains `descendant`."""
    # Walk up from the descendant until the parent is the ancestor itself.
    current: Optional[LN] = descendant
    while current and current.parent != ancestor:
        current = current.parent
    return current
2130
2131
def container_of(leaf: Leaf) -> LN:
    """Return `leaf` or one of its ancestors that is the topmost container of it.

    By "container" we mean a node where `leaf` is the very first child.
    """
    shared_prefix = leaf.prefix
    container: LN = leaf
    while container:
        parent = container.parent
        # Stop climbing when the parent no longer starts with this leaf's
        # prefix, is the whole file, or directly follows a bracket.
        if (
            parent is None
            or parent.children[0].prefix != shared_prefix
            or parent.type == syms.file_input
            or (
                parent.prev_sibling is not None
                and parent.prev_sibling.type in BRACKETS
            )
        ):
            break

        container = parent
    return container
2155
2156
def is_split_after_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority:
    """Return the priority of the `leaf` delimiter, given a line break after it.

    The delimiter priorities returned here are from those delimiters that would
    cause a line break after themselves.

    Higher numbers are higher priority.
    """
    # Only the comma splits after itself; everything else splits before.
    return COMMA_PRIORITY if leaf.type == token.COMMA else 0
2169
2170
def is_split_before_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority:
    """Return the priority of the `leaf` delimiter, given a line break before it.

    The delimiter priorities returned here are from those delimiters that would
    cause a line break before themselves.

    Higher numbers are higher priority.  The checks below are ordered from
    most to least specific; the first match wins.
    """
    if is_vararg(leaf, within=VARARGS_PARENTS | UNPACKING_PARENTS):
        # * and ** might also be MATH_OPERATORS but in this case they are not.
        # Don't treat them as a delimiter.
        return 0

    if (
        leaf.type == token.DOT
        and leaf.parent
        and leaf.parent.type not in {syms.import_from, syms.dotted_name}
        and (previous is None or previous.type in CLOSING_BRACKETS)
    ):
        return DOT_PRIORITY

    if (
        leaf.type in MATH_OPERATORS
        and leaf.parent
        and leaf.parent.type not in {syms.factor, syms.star_expr}
    ):
        return MATH_PRIORITIES[leaf.type]

    if leaf.type in COMPARATORS:
        return COMPARATOR_PRIORITY

    if (
        leaf.type == token.STRING
        and previous is not None
        and previous.type == token.STRING
    ):
        # Implicit string concatenation.
        return STRING_PRIORITY

    if leaf.type not in {token.NAME, token.ASYNC}:
        return 0

    if (
        leaf.value == "for"
        and leaf.parent
        and leaf.parent.type in {syms.comp_for, syms.old_comp_for}
        or leaf.type == token.ASYNC
    ):
        if (
            not isinstance(leaf.prev_sibling, Leaf)
            or leaf.prev_sibling.value != "async"
        ):
            # In `async for`, split on `async`, not again on `for`.
            return COMPREHENSION_PRIORITY

    if (
        leaf.value == "if"
        and leaf.parent
        and leaf.parent.type in {syms.comp_if, syms.old_comp_if}
    ):
        return COMPREHENSION_PRIORITY

    if leaf.value in {"if", "else"} and leaf.parent and leaf.parent.type == syms.test:
        # Ternary conditional expression.
        return TERNARY_PRIORITY

    if leaf.value == "is":
        return COMPARATOR_PRIORITY

    if (
        leaf.value == "in"
        and leaf.parent
        and leaf.parent.type in {syms.comp_op, syms.comparison}
        and not (
            previous is not None
            and previous.type == token.NAME
            and previous.value == "not"
        )
    ):
        # `in`, but not the second half of `not in` (split happens on `not`).
        return COMPARATOR_PRIORITY

    if (
        leaf.value == "not"
        and leaf.parent
        and leaf.parent.type == syms.comp_op
        and not (
            previous is not None
            and previous.type == token.NAME
            and previous.value == "is"
        )
    ):
        # `not in`, but not the second half of `is not` (split happens on `is`).
        return COMPARATOR_PRIORITY

    if leaf.value in LOGIC_OPERATORS and leaf.parent:
        return LOGIC_PRIORITY

    return 0
2265
2266
# Comment markers that switch formatting off and back on for a region;
# yapf's spellings are honored as well.
FMT_OFF = {"# fmt: off", "# fmt:off", "# yapf: disable"}
FMT_ON = {"# fmt: on", "# fmt:on", "# yapf: enable"}
2269
2270
def generate_comments(leaf: LN) -> Iterator[Leaf]:
    """Clean the prefix of the `leaf` and generate comments from it, if any.

    lib2to3 shoves comments into the whitespace prefix of the following token
    (see `pgen2/driver.py:Driver.parse_tokens()`).  That keeps the grammar
    simple — comments can appear anywhere — but it also means comments don't
    "belong" to any node, so this function emits simple parentless Leaf
    objects for them.

    We only need to differentiate between inline and standalone comments (the
    latter don't share their line with any code): inline comments are emitted
    as regular token.COMMENT leaves, standalone ones with the fake
    STANDALONE_COMMENT token identifier.
    """
    at_endmarker = leaf.type == token.ENDMARKER
    for proto in list_comments(leaf.prefix, is_endmarker=at_endmarker):
        yield Leaf(proto.type, proto.value, prefix="\n" * proto.newlines)
2292
2293
@dataclass
class ProtoComment:
    """Describes a piece of syntax that is a comment.

    It's not a :class:`blib2to3.pytree.Leaf` so that:

    * it can be cached (`Leaf` objects should not be reused more than once as
      they store their lineno, column, prefix, and parent information);
    * `newlines` and `consumed` fields are kept separate from the `value`. This
      simplifies handling of special marker comments like ``# fmt: off/on``.
    """

    type: int  # token.COMMENT or STANDALONE_COMMENT
    value: str  # content of the comment, normalized by make_comment()
    newlines: int  # how many newlines before the comment
    consumed: int  # how many characters of the original leaf's prefix did we consume
2310
2311
@lru_cache(maxsize=4096)
def list_comments(prefix: str, *, is_endmarker: bool) -> List[ProtoComment]:
    """Return a list of :class:`ProtoComment` objects parsed from the given `prefix`.

    `is_endmarker` is True when the prefix belongs to the ENDMARKER token; in
    that case even a first-line comment is treated as standalone.
    """
    result: List[ProtoComment] = []
    if not prefix or "#" not in prefix:
        return result

    consumed = 0
    nlines = 0  # blank lines seen since the last emitted comment
    ignored_lines = 0  # lines skipped due to a trailing backslash
    for index, line in enumerate(prefix.split("\n")):
        consumed += len(line) + 1  # adding the length of the split '\n'
        line = line.lstrip()
        if not line:
            nlines += 1
        if not line.startswith("#"):
            # Escaped newlines outside of a comment are not really newlines at
            # all. We treat a single-line comment following an escaped newline
            # as a simple trailing comment.
            if line.endswith("\\"):
                ignored_lines += 1
            continue

        # A comment on the first (non-ignored) line shares the line with code.
        if index == ignored_lines and not is_endmarker:
            comment_type = token.COMMENT  # simple trailing comment
        else:
            comment_type = STANDALONE_COMMENT
        comment = make_comment(line)
        result.append(
            ProtoComment(
                type=comment_type, value=comment, newlines=nlines, consumed=consumed
            )
        )
        nlines = 0
    return result
2347
2348
def make_comment(content: str) -> str:
    """Return a consistently formatted comment from the given `content` string.

    All comments (except for "##", "#!", "#:", '#'", "#%%") should have a single
    space between the hash sign and the content.

    If `content` didn't start with a hash sign, one is provided.
    """
    text = content.rstrip()
    if not text:
        return "#"

    if text.startswith("#"):
        text = text[1:]
    # Leave shebangs, directives, and similar special forms untouched.
    if text and text[0] not in " !:#'%":
        text = " " + text
    return "#" + text
2366
2367
def split_line(
    line: Line,
    line_length: int,
    inner: bool = False,
    features: Collection[Feature] = (),
) -> Iterator[Line]:
    """Split a `line` into potentially many lines.

    They should fit in the allotted `line_length` but might not be able to.
    `inner` signifies that there were a pair of brackets somewhere around the
    current `line`, possibly transitively. This means we can fallback to splitting
    by delimiters if the LHS/RHS don't yield any results.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = str(line).strip("\n")

    # Fast path: leave the line alone if it already fits (or only contains
    # unsplittable type comments) and nothing forces an explosion.
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_explode
        and not line.is_collection_with_optional_trailing_comma
        and (
            is_line_short_enough(line, line_length=line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
    ):
        yield line
        return

    split_funcs: List[SplitFunc]
    if line.is_def:
        split_funcs = [left_hand_split]
    else:

        def rhs(line: Line, features: Collection[Feature]) -> Iterator[Line]:
            # Try progressively larger sets of omitted trailers; accept the
            # first split whose head fits the line length.
            for omit in generate_trailers_to_omit(line, line_length):
                lines = list(right_hand_split(line, line_length, features, omit=omit))
                if is_line_short_enough(lines[0], line_length=line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line.
            yield from right_hand_split(line, line_length, features=features)

        if line.inside_brackets:
            split_funcs = [delimiter_split, standalone_comment_split, rhs]
        else:
            split_funcs = [rhs]
    for split_func in split_funcs:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        result: List[Line] = []
        try:
            for l in split_func(line, features):
                if str(l).strip("\n") == line_str:
                    raise CannotSplit("Split function returned an unchanged result")

                # Recursively split each produced line further if needed.
                result.extend(
                    split_line(
                        l, line_length=line_length, inner=True, features=features
                    )
                )
        except CannotSplit:
            continue

        else:
            yield from result
            break

    else:
        # Every split function failed; emit the line unchanged.
        yield line
2446
2447
def left_hand_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise. This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    head_part: List[Leaf] = []
    body_part: List[Leaf] = []
    tail_part: List[Leaf] = []
    bucket = head_part
    opening = None
    for leaf in line.leaves:
        # The closing bracket matching the first opening one ends the body.
        if (
            bucket is body_part
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is opening
        ):
            bucket = tail_part if body_part else head_part
        bucket.append(leaf)
        # The first opening bracket ends the head and starts the body.
        if bucket is head_part and leaf.type in OPENING_BRACKETS:
            opening = leaf
            bucket = body_part
    if not opening:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_part, line, opening)
    body = bracket_split_build_line(body_part, line, opening, is_body=True)
    tail = bracket_split_build_line(tail_part, line, opening)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result
2482
2483
def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket = None
    closing_bracket = None
    # Scan right to left: the last non-omitted closing bracket opens the body,
    # and reaching its opening bracket starts the head.
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail. Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    # The leaves were collected in reverse order; restore source order.
    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        # the body shouldn't be exploded
        not body.should_explode
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length)
    ):
        # Retry the split with this pair of optional parentheses omitted.
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                )

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to "
                    "satisfy the splitting algorithm because the head or the tail "
                    "contains multiline strings which by definition never fit one "
                    "line."
                )

    # Keeping the optional parentheses: make sure both are rendered.
    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result
2571
2572
def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    if body:
        return

    tail_len = len(str(tail).strip())
    if tail_len == 0:
        raise CannotSplit("Splitting brackets produced the same line")

    if tail_len < 3:
        raise CannotSplit(
            f"Splitting brackets on an empty body to save "
            f"{tail_len} characters is not worth it"
        )
2597
2598
def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
    result = Line(depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function
            # arguments, but be careful not to add one after any comments.
            wants_comma = original.is_import or (
                original.is_def
                and not any(leaf.type == token.COMMA for leaf in leaves)
            )
            if wants_comma:
                # Walk backwards past standalone comments; add a comma only
                # if one isn't already there.
                for idx in range(len(leaves) - 1, -1, -1):
                    if leaves[idx].type == STANDALONE_COMMENT:
                        continue

                    if leaves[idx].type != token.COMMA:
                        leaves.insert(idx + 1, Leaf(token.COMMA, ","))
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body:
        result.should_explode = should_explode(result, opening_bracket)
    return result
2637
2638
def dont_increase_indentation(split_func: SplitFunc) -> SplitFunc:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for produced in split_func(line, features):
            # Lines produced by split functions live inside brackets; strip
            # any leading whitespace carried over from the original prefix.
            normalize_prefix(produced.leaves[0], inside_brackets=True)
            yield produced

    return split_wrapper
2652
2653
@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty")

    bt = line.bracket_tracker
    try:
        # The trailing leaf is excluded so e.g. a trailing comma doesn't win.
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found")

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        # Track whether a trailing comma may be added: it's only safe when
        # any `*`/`**` varargs at the outermost depth are supported by the
        # target versions' grammar (the corresponding Feature is present).
        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            # This leaf is a delimiter of the winning priority: break here.
            yield current_line

            current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            current_line.append(Leaf(token.COMMA, ","))
        yield current_line
2722
2723
@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    pending = Line(depth=line.depth, inside_brackets=line.inside_brackets)

    def emit_or_append(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to the pending line, flushing it first if necessary."""
        nonlocal pending
        try:
            pending.append_safe(leaf, preformatted=True)
        except ValueError:
            # The leaf can't join the pending line; emit it and start fresh.
            yield pending

            pending = Line(depth=line.depth, inside_brackets=line.inside_brackets)
            pending.append(leaf)

    for leaf in line.leaves:
        yield from emit_or_append(leaf)

        for comment_after in line.comments_after(leaf):
            yield from emit_or_append(comment_after)

    if pending:
        yield pending
2753
2754
def is_import(leaf: Leaf) -> bool:
    """Return True if the given leaf starts an import statement."""
    parent = leaf.parent
    if leaf.type != token.NAME or parent is None:
        return False

    if leaf.value == "import":
        return parent.type == syms.import_name
    if leaf.value == "from":
        return parent.type == syms.import_from
    return False
2767
2768
def is_type_comment(leaf: Leaf, suffix: str = "") -> bool:
    """Return True if the given leaf is a special comment.
    Only returns true for type comments for now.

    `suffix` narrows the match, e.g. suffix=" ignore" matches "# type: ignore".
    """
    t = leaf.type
    v = leaf.value
    # Bug fix: the membership set previously contained the *boolean*
    # `t == STANDALONE_COMMENT` instead of STANDALONE_COMMENT itself, so
    # standalone comments were never recognized as type comments.
    return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith(
        "# type:" + suffix
    )
2777
2778
def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if inside_brackets:
        leaf.prefix = ""
        return

    pieces = leaf.prefix.split("#")
    if "\\" in pieces[0]:
        # A line-continuation backslash before any comment: drop the prefix.
        leaf.prefix = ""
        return

    # Keep the newlines that follow the last comment (or all of them when
    # there is no comment at all).
    newline_count = pieces[-1].count("\n")
    if len(pieces) > 1:
        newline_count -= 1
    leaf.prefix = "\n" * newline_count
2795
2796
def normalize_string_prefix(leaf: Leaf, remove_u_prefix: bool = False) -> None:
    """Make all string prefixes lowercase.

    If remove_u_prefix is given, also removes any u prefix from the string.

    Note: Mutates its argument.
    """
    match = re.match(r"^([furbFURB]*)(.*)$", leaf.value, re.DOTALL)
    assert match is not None, f"failed to match string {leaf.value!r}"
    prefix, rest = match.group(1), match.group(2)
    prefix = prefix.lower()
    if remove_u_prefix:
        prefix = prefix.replace("u", "")
    leaf.value = prefix + rest
2811
2812
def normalize_string_quotes(leaf: Leaf) -> None:
    """Prefer double quotes but only if it doesn't cause more escaping.

    Adds or removes backslashes as appropriate. Doesn't parse and fix
    strings nested in f-strings (yet).

    Note: Mutates its argument.
    """
    # Skip over any string prefix (f/u/r/b, any case) to inspect the quotes.
    value = leaf.value.lstrip("furbFURB")
    if value[:3] == '"""':
        return

    elif value[:3] == "'''":
        orig_quote = "'''"
        new_quote = '"""'
    elif value[0] == '"':
        orig_quote = '"'
        new_quote = "'"
    else:
        orig_quote = "'"
        new_quote = '"'
    first_quote_pos = leaf.value.find(orig_quote)
    if first_quote_pos == -1:
        return  # There's an internal error

    prefix = leaf.value[:first_quote_pos]
    # new_quote preceded by an even number of backslashes (i.e. not escaped).
    unescaped_new_quote = re.compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
    # new_quote preceded by an odd number of backslashes (i.e. escaped).
    escaped_new_quote = re.compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}")
    # orig_quote preceded by an odd number of backslashes (i.e. escaped).
    escaped_orig_quote = re.compile(rf"([^\\]|^)\\((?:\\\\)*){orig_quote}")
    body = leaf.value[first_quote_pos + len(orig_quote) : -len(orig_quote)]
    if "r" in prefix.casefold():
        if unescaped_new_quote.search(body):
            # There's at least one unescaped new_quote in this raw string
            # so converting is impossible
            return

        # Do not introduce or remove backslashes in raw strings
        new_body = body
    else:
        # remove unnecessary escapes
        new_body = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", body)
        if body != new_body:
            # Consider the string without unnecessary escapes as the original
            body = new_body
            leaf.value = f"{prefix}{orig_quote}{body}{orig_quote}"
        new_body = sub_twice(escaped_orig_quote, rf"\1\2{orig_quote}", new_body)
        new_body = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_body)
    if "f" in prefix.casefold():
        # In f-strings, backslashes are forbidden inside the {...} fields,
        # so bail out if any replacement would put one there.
        matches = re.findall(
            r"""
            (?:[^{]|^)\{ # start of the string or a non-{ followed by a single {
            ([^{].*?) # contents of the brackets except if begins with {{
            \}(?:[^}]|$) # A } followed by end of the string or a non-}
            """,
            new_body,
            re.VERBOSE,
        )
        for m in matches:
            if "\\" in str(m):
                # Do not introduce backslashes in interpolated expressions
                return
    if new_quote == '"""' and new_body[-1:] == '"':
        # edge case: a '"' just before the closing '"""' would otherwise
        # terminate the triple-quoted string early, so escape it.
        new_body = new_body[:-1] + '\\"'
    orig_escape_count = body.count("\\")
    new_escape_count = new_body.count("\\")
    if new_escape_count > orig_escape_count:
        return  # Do not introduce more escaping

    if new_escape_count == orig_escape_count and orig_quote == '"':
        return  # Prefer double quotes

    leaf.value = f"{prefix}{new_quote}{new_body}{new_quote}"
2886
2887
def normalize_numeric_literal(leaf: Leaf) -> None:
    """Normalizes numeric (float, int, and complex) literals.

    All letters used in the representation are normalized to lowercase (except
    in Python 2 long literals).
    """
    text = leaf.value.lower()
    if text.startswith(("0o", "0b")):
        # Octal and binary literals are left untouched.
        pass
    elif text.startswith("0x"):
        # Hex digits are upper-cased; the "0x" marker stays lowercase.
        text = "0x" + text[2:].upper()
    elif "e" in text:
        # Scientific notation: normalize the mantissa and drop a leading "+"
        # from the exponent.
        mantissa, _, exponent = text.partition("e")
        sign = ""
        if exponent.startswith("-"):
            sign = "-"
            exponent = exponent[1:]
        elif exponent.startswith("+"):
            exponent = exponent[1:]
        text = f"{format_float_or_int_string(mantissa)}e{sign}{exponent}"
    elif text.endswith(("j", "l")):
        digits = text[:-1]
        suffix = text[-1]
        # Capitalize in "2L" because "l" looks too similar to "1".
        if suffix == "l":
            suffix = "L"
        text = format_float_or_int_string(digits) + suffix
    else:
        text = format_float_or_int_string(text)
    leaf.value = text
2922
2923
def format_float_or_int_string(text: str) -> str:
    """Formats a float string like "1.0".

    Integer strings pass through unchanged; floats with a bare leading or
    trailing dot get an explicit 0 (".5" -> "0.5", "10." -> "10.0").
    """
    if "." not in text:
        return text

    integer_part, fractional_part = text.split(".")
    return f"{integer_part or 0}.{fractional_part or 0}"
2931
2932
def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.

    Note: Mutates `node` in place.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if is_walrus_assignment(child):
                # `(x := ...)` parens are mandatory; leave them alone.
                continue
            if child.type == syms.atom:
                # Determines if the underlying atom should be surrounded with
                # invisible params - also makes parens invisible recursively
                # within the atom and removes repeated invisible parens within
                # the atom
                should_surround_with_parens = maybe_make_parens_invisible_in_atom(
                    child, parent=node
                )

                if should_surround_with_parens:
                    lpar = Leaf(token.LPAR, "")
                    rpar = Leaf(token.RPAR, "")
                    # `remove()` returns the child's old index (or None for
                    # an unparented node, hence the `or 0` fallback).
                    index = child.remove() or 0
                    node.insert_child(index, Node(syms.atom, [lpar, child, rpar]))
            elif is_one_tuple(child):
                # wrap child in visible parentheses
                lpar = Leaf(token.LPAR, "(")
                rpar = Leaf(token.RPAR, ")")
                child.remove()
                node.insert_child(index, Node(syms.atom, [lpar, child, rpar]))
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if child.type == token.LPAR:
                    # make parentheses invisible
                    child.value = ""  # type: ignore
                    node.children[-1].value = ""  # type: ignore
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                # An import_from statement needs at most one pair of parens;
                # no need to look at the remaining children.
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                # wrap child in invisible parentheses
                lpar = Leaf(token.LPAR, "")
                rpar = Leaf(token.RPAR, "")
                index = child.remove() or 0
                prefix = child.prefix
                child.prefix = ""
                new_child = Node(syms.atom, [lpar, child, rpar])
                new_child.prefix = prefix
                node.insert_child(index, new_child)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after
3005
3006
def normalize_fmt_off(node: Node) -> None:
    """Convert content between `# fmt: off`/`# fmt: on` into standalone comments."""
    # Each call converts at most one pair; keep going until a pass finds none.
    while convert_one_fmt_off_pair(node):
        pass
3012
3013
def convert_one_fmt_off_pair(node: Node) -> bool:
    """Convert content of a single `# fmt: off`/`# fmt: on` into a standalone comment.

    Returns True if a pair was converted.
    """
    for leaf in node.leaves():
        previous_consumed = 0
        for comment in list_comments(leaf.prefix, is_endmarker=False):
            if comment.value in FMT_OFF:
                # We only want standalone comments. If there's no previous leaf or
                # the previous leaf is indentation, it's a standalone comment in
                # disguise.
                if comment.type != STANDALONE_COMMENT:
                    prev = preceding_leaf(leaf)
                    if prev and prev.type not in WHITESPACE:
                        continue

                ignored_nodes = list(generate_ignored_nodes(leaf))
                if not ignored_nodes:
                    continue

                first = ignored_nodes[0]  # Can be a container node with the `leaf`.
                parent = first.parent
                prefix = first.prefix
                # Everything up to `comment.consumed` belongs to the comment
                # being converted; keep only what follows it.
                first.prefix = prefix[comment.consumed :]
                # The verbatim text of the ignored region, led by the
                # `# fmt: off` comment itself.
                hidden_value = (
                    comment.value + "\n" + "".join(str(n) for n in ignored_nodes)
                )
                if hidden_value.endswith("\n"):
                    # That happens when one of the `ignored_nodes` ended with a NEWLINE
                    # leaf (possibly followed by a DEDENT).
                    hidden_value = hidden_value[:-1]
                # Detach the ignored nodes, remembering where the first one was
                # so the replacement comment lands in the same slot.
                first_idx = None
                for ignored in ignored_nodes:
                    index = ignored.remove()
                    if first_idx is None:
                        first_idx = index
                assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)"
                assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)"
                parent.insert_child(
                    first_idx,
                    Leaf(
                        STANDALONE_COMMENT,
                        hidden_value,
                        prefix=prefix[:previous_consumed] + "\n" * comment.newlines,
                    ),
                )
                return True

            previous_consumed = comment.consumed

    return False
3066
3067
def generate_ignored_nodes(leaf: Leaf) -> Iterator[LN]:
    """Starting from the container of `leaf`, generate all leaves until `# fmt: on`.

    Stops at the end of the block.
    """
    current: Optional[LN] = container_of(leaf)
    while current is not None and current.type != token.ENDMARKER:
        # A `# fmt: on` comment in this sibling's prefix ends the region.
        found_comments = list_comments(current.prefix, is_endmarker=False)
        if any(comment.value in FMT_ON for comment in found_comments):
            return

        yield current

        current = current.next_sibling
3082
3083
def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.

    """
    # Parens must stay visible for: non-atoms, empty tuples `()`, one-element
    # tuples `(x,)`, parenthesized `yield` outside an assignment, and atoms
    # containing top-level commas (where removing parens changes meaning).
    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    first = node.children[0]
    last = node.children[-1]
    if first.type == token.LPAR and last.type == token.RPAR:
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""  # type: ignore
        last.value = ""  # type: ignore
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        # This atom already provides (now invisible) parens; the caller need
        # not wrap it again.
        return False

    return True
3118
3119
def is_atom_with_invisible_parens(node: LN) -> bool:
    """Given a `LN`, determines whether it's an atom `node` with invisible
    parens. Useful in dedupe-ing and normalizing parens.
    """
    if isinstance(node, Leaf) or node.type != syms.atom:
        return False

    first = node.children[0]
    last = node.children[-1]
    # Invisible parens are LPAR/RPAR leaves whose value is the empty string.
    if not (isinstance(first, Leaf) and isinstance(last, Leaf)):
        return False
    return (
        first.type == token.LPAR
        and first.value == ""
        and last.type == token.RPAR
        and last.value == ""
    )
3136
3137
def is_empty_tuple(node: LN) -> bool:
    """Return True if `node` holds an empty tuple."""
    # An empty tuple is an atom with exactly an LPAR and an RPAR child.
    if node.type != syms.atom or len(node.children) != 2:
        return False

    opening, closing = node.children
    return opening.type == token.LPAR and closing.type == token.RPAR
3146
3147
def unwrap_singleton_parenthesis(node: LN) -> Optional[LN]:
    """Returns `wrapped` if `node` is of the shape ( wrapped ).

    Parenthesis can be optional. Returns None otherwise"""
    if len(node.children) == 3:
        opening, inner, closing = node.children
        if opening.type == token.LPAR and closing.type == token.RPAR:
            return inner

    return None
3159
3160
def is_one_tuple(node: LN) -> bool:
    """Return True if `node` holds a tuple with one element, with or without parens."""
    if node.type == syms.atom:
        # Parenthesized form: the wrapped expression must be `expr ,`.
        inner = unwrap_singleton_parenthesis(node)
        return (
            inner is not None
            and inner.type == syms.testlist_gexp
            and len(inner.children) == 2
            and inner.children[1].type == token.COMMA
        )

    # Bare form: an implicit tuple node of the shape `expr ,`.
    if node.type not in IMPLICIT_TUPLE or len(node.children) != 2:
        return False
    return node.children[1].type == token.COMMA
3175
3176
def is_walrus_assignment(node: LN) -> bool:
    """Return True iff `node` is of the shape ( test := test )"""
    wrapped = unwrap_singleton_parenthesis(node)
    if wrapped is None:
        return False
    return wrapped.type == syms.namedexpr_test
3181
3182
def is_yield(node: LN) -> bool:
    """Return True if `node` holds a `yield` or `yield from` expression."""
    if node.type == syms.yield_expr:
        return True

    if node.type == token.NAME and node.value == "yield":  # type: ignore
        return True

    # Otherwise, only a parenthesized yield qualifies: an atom of exactly
    # `( <yield> )`, checked recursively.
    if node.type != syms.atom or len(node.children) != 3:
        return False

    opening, inner, closing = node.children
    if opening.type != token.LPAR or closing.type != token.RPAR:
        return False
    return is_yield(inner)
3202
3203
def is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool:
    """Return True if `leaf` is a star or double star in a vararg or kwarg.

    If `within` includes VARARGS_PARENTS, this applies to function signatures.
    If `within` includes UNPACKING_PARENTS, it applies to right hand-side
    extended iterable unpacking (PEP 3132) and additional unpacking
    generalizations (PEP 448).
    """
    if leaf.type not in VARARGS_SPECIALS or not leaf.parent:
        return False

    parent = leaf.parent
    if parent.type == syms.star_expr:
        # Star expressions are also used as assignment targets in extended
        # iterable unpacking (PEP 3132); classify by the grandparent instead.
        if parent.parent is None:
            return False

        parent = parent.parent

    return parent.type in within
3225
3226
def is_multiline_string(leaf: Leaf) -> bool:
    """Return True if `leaf` is a multiline string that actually spans many lines."""
    # Strip any string prefix (f/u/r/b, any case) before inspecting the quotes.
    body = leaf.value.lstrip("furbFURB")
    if "\n" not in body:
        return False
    return body.startswith(('"""', "'''"))
3231
3232
def is_stub_suite(node: Node) -> bool:
    """Return True if `node` is a suite with a stub body."""
    # A stub suite is exactly: NEWLINE, INDENT, <stub body>, DEDENT.
    children = node.children
    if len(children) != 4:
        return False
    if children[0].type != token.NEWLINE or children[1].type != token.INDENT:
        return False
    if children[3].type != token.DEDENT:
        return False

    return is_stub_body(children[2])
3244
3245
def is_stub_body(node: LN) -> bool:
    """Return True if `node` is a simple statement containing an ellipsis."""
    if not isinstance(node, Node) or node.type != syms.simple_stmt:
        return False

    if len(node.children) != 2:
        return False

    # The statement's payload must be an atom of exactly three "." leaves.
    child = node.children[0]
    if child.type != syms.atom or len(child.children) != 3:
        return False
    dot = Leaf(token.DOT, ".")
    return all(leaf == dot for leaf in child.children)
3260
3261
def max_delimiter_priority_in_atom(node: LN) -> Priority:
    """Return maximum delimiter priority inside `node`.

    This is specific to atoms with contents contained in a pair of parentheses.
    If `node` isn't an atom or there are no enclosing parentheses, returns 0.
    """
    if node.type != syms.atom:
        return 0

    if node.children[0].type != token.LPAR or node.children[-1].type != token.RPAR:
        return 0

    # Mark every leaf strictly between the parentheses.
    tracker = BracketTracker()
    for child in node.children[1:-1]:
        if isinstance(child, Leaf):
            tracker.mark(child)
        else:
            for leaf in child.leaves():
                tracker.mark(leaf)

    try:
        return tracker.max_delimiter_priority()
    except ValueError:
        # No delimiters were marked at all.
        return 0
3288
3289
def ensure_visible(leaf: Leaf) -> None:
    """Make sure parentheses are visible.

    They could be invisible as part of some statements (see
    :func:`normalize_invisible_parens` and :func:`visit_import_from`).
    """
    visible_value = {token.LPAR: "(", token.RPAR: ")"}.get(leaf.type)
    if visible_value is not None:
        leaf.value = visible_value
3300
3301
def should_explode(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` immediately be split with `delimiter_split()` after RHS?"""
    parent = opening_bracket.parent
    if parent is None:
        return False
    if parent.type not in {syms.atom, syms.import_from}:
        return False
    if opening_bracket.value not in "[{(":
        return False

    try:
        # A trailing comma does not count towards the delimiter priority.
        trailing = line.leaves[-1]
        excluded = {id(trailing)} if trailing.type == token.COMMA else set()
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=excluded)
    except (IndexError, ValueError):
        # Empty line or no delimiters at all.
        return False

    return max_priority == COMMA_PRIORITY
3320
3321
def get_features_used(node: Node) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    """
    features: Set[Feature] = set()
    fstring_starts = {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}
    for n in node.pre_order():
        if n.type == token.STRING:
            if n.value[:2] in fstring_starts:  # type: ignore
                features.add(Feature.F_STRINGS)

        elif n.type == token.NUMBER:
            if "_" in n.value:  # type: ignore
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            # A bare "/" in an argument list marks positional-only parameters.
            if n.parent and n.parent.type in {syms.typedargslist, syms.arglist}:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            # Trailing comma in a def/call: only a feature when the argument
            # list also contains a * or ** entry.
            feature = (
                Feature.TRAILING_COMMA_IN_DEF
                if n.type == syms.typedargslist
                else Feature.TRAILING_COMMA_IN_CALL
            )
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

    return features
3369
3370
def detect_target_versions(node: Node) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    used_features = get_features_used(node)
    # A version is viable when it supports every feature found in the file.
    return {
        version
        for version in TargetVersion
        if used_features.issubset(VERSION_TO_FEATURES[version])
    }
3377
3378
def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too). First
    set is empty.
    """

    omit: Set[LeafID] = set()
    yield omit

    # Walk the line right-to-left, accumulating rendered length; stop as soon
    # as the scanned suffix no longer fits in `line_length`.
    length = 4 * line.depth
    opening_bracket = None
    closing_bracket = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in enumerate_with_length(line, reversed=True):
        length += leaf_length
        if length > line_length:
            break

        # A leaf longer than its value + prefix carries an inline comment;
        # comments make the suffix unsplittable, so stop here.
        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            # Currently inside a bracket pair (scanning right-to-left).
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            if index > 0 and line.leaves[index - 1].type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                # The previously seen trailer fits: commit it (and any empty
                # brackets collected inside it) to the omit set.
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
3427
3428
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file.

    Only statements at the very top of the file (optionally preceded by a
    docstring) are considered, matching Python's own rules for __future__.
    """
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        # Yield imported names from the children of an import_from node,
        # handling plain names, `a as b` aliases, and comma-separated lists.
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value
            elif child.type == syms.import_as_name:
                # `name as alias`: the imported feature is the original name.
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue
            else:
                break
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break
            # children[3:] skips `from`, `__future__`, and `import`.
            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break
    return imports
3470
3471
def gen_python_files_in_dir(
    path: Path,
    root: Path,
    include: Pattern[str],
    exclude: Pattern[str],
    report: "Report",
) -> Iterator[Path]:
    """Generate all files under `path` whose paths are not excluded by the
    `exclude` regex, but are included by the `include` regex.

    Symbolic links pointing outside of the `root` directory are ignored.

    `report` is where output about exclusions goes.
    """
    assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
    for child in path.iterdir():
        try:
            normalized_path = "/" + child.resolve().relative_to(root).as_posix()
        except ValueError:
            # Only symlinks can resolve outside of `root`; anything else is
            # a genuine error.
            if not child.is_symlink():
                raise

            report.path_ignored(
                child, f"is a symbolic link that points outside {root}"
            )
            continue

        is_directory = child.is_dir()
        if is_directory:
            # Directories get a trailing slash so exclude patterns can match
            # whole directory trees.
            normalized_path += "/"

        exclude_match = exclude.search(normalized_path)
        if exclude_match and exclude_match.group(0):
            report.path_ignored(child, f"matches the --exclude regular expression")
            continue

        if is_directory:
            yield from gen_python_files_in_dir(child, root, include, exclude, report)
        elif child.is_file() and include.search(normalized_path):
            yield child
3513
3514
@lru_cache()
def find_project_root(srcs: Iterable[str]) -> Path:
    """Return a directory containing .git, .hg, or pyproject.toml.

    That directory can be one of the directories passed in `srcs` or their
    common parent.

    If no directory in the tree contains a marker that would specify it's the
    project root, the root of the file system is returned.
    """
    if not srcs:
        return Path("/").resolve()

    # The lexicographically smallest resolved path is the common ancestor
    # candidate; appending a fake file makes `parents` include it as well.
    common_base = min(Path(src).resolve() for src in srcs)
    if common_base.is_dir():
        # Append a fake file so `parents` below returns `common_base_dir`, too.
        common_base /= "fake-file"

    for directory in common_base.parents:
        if (directory / ".git").is_dir() or (directory / ".hg").is_dir():
            return directory

        if (directory / "pyproject.toml").is_file():
            return directory

    # Fell through: `directory` is now the filesystem root.
    return directory
3543
3544
@dataclass
class Report:
    """Provides a reformatting counter. Can be rendered with `str(report)`."""

    check: bool = False
    quiet: bool = False
    verbose: bool = False
    change_count: int = 0
    same_count: int = 0
    failure_count: int = 0

    def done(self, src: Path, changed: Changed) -> None:
        """Increment the counter for successful reformatting. Write out a message."""
        if changed is Changed.YES:
            reformatted = "would reformat" if self.check else "reformatted"
            if self.verbose or not self.quiet:
                out(f"{reformatted} {src}")
            self.change_count += 1
            return

        if self.verbose:
            if changed is Changed.NO:
                msg = f"{src} already well formatted, good job."
            else:
                # Changed.CACHED: skipped because the cache says it's clean.
                msg = f"{src} wasn't modified on disk since last run."
            out(msg, bold=False)
        self.same_count += 1

    def failed(self, src: Path, message: str) -> None:
        """Increment the counter for failed reformatting. Write out a message."""
        err(f"error: cannot format {src}: {message}")
        self.failure_count += 1

    def path_ignored(self, path: Path, message: str) -> None:
        # Ignored paths are only surfaced in verbose mode.
        if self.verbose:
            out(f"{path} ignored: {message}", bold=False)

    @property
    def return_code(self) -> int:
        """Return the exit code that the app should use.

        This considers the current state of changed files and failures:
        - if there were any failures, return 123;
        - if any files were changed and --check is being used, return 1;
        - otherwise return 0.
        """
        # According to http://tldp.org/LDP/abs/html/exitcodes.html starting with
        # 126 we have special return codes reserved by the shell.
        if self.failure_count:
            return 123

        if self.change_count and self.check:
            return 1

        return 0

    def __str__(self) -> str:
        """Render a color report of the current state.

        Use `click.unstyle` to remove colors.
        """
        if self.check:
            reformatted = "would be reformatted"
            unchanged = "would be left unchanged"
            failed = "would fail to reformat"
        else:
            reformatted = "reformatted"
            unchanged = "left unchanged"
            failed = "failed to reformat"
        parts = []
        if self.change_count:
            s = "s" if self.change_count > 1 else ""
            parts.append(
                click.style(f"{self.change_count} file{s} {reformatted}", bold=True)
            )
        if self.same_count:
            s = "s" if self.same_count > 1 else ""
            parts.append(f"{self.same_count} file{s} {unchanged}")
        if self.failure_count:
            s = "s" if self.failure_count > 1 else ""
            parts.append(
                click.style(f"{self.failure_count} file{s} {failed}", fg="red")
            )
        return ", ".join(parts) + "."
3628
3629
def parse_ast(src: str) -> Union[ast.AST, ast3.AST, ast27.AST]:
    """Parse `src`, trying progressively older grammars.

    On Python 3.8+ the stdlib `ast` is tried with descending feature
    versions; otherwise typed-ast's Python 3 parser is tried. If all of
    those fail, fall back to typed-ast's Python 2 parser.
    """
    filename = "<unknown>"
    if sys.version_info >= (3, 8):
        # TODO: support Python 4+ ;)
        for minor in range(sys.version_info[1], 4, -1):
            try:
                return ast.parse(src, filename, feature_version=(3, minor))
            except SyntaxError:
                continue
    else:
        for feature in (7, 6):
            try:
                return ast3.parse(src, filename, feature_version=feature)
            except SyntaxError:
                continue

    return ast27.parse(src)
3647
3648
def _fixup_ast_constants(
    node: Union[ast.AST, ast3.AST, ast27.AST]
) -> Union[ast.AST, ast3.AST, ast27.AST]:
    """Map ast nodes deprecated in 3.8 to Constant."""
    # casts are required until this is released:
    # https://github.com/python/typeshed/pull/3142
    if isinstance(node, (ast.Str, ast3.Str, ast27.Str, ast.Bytes, ast3.Bytes)):
        replacement = ast.Constant(value=node.s)
    elif isinstance(node, (ast.Num, ast3.Num, ast27.Num)):
        replacement = ast.Constant(value=node.n)
    elif isinstance(node, (ast.NameConstant, ast3.NameConstant)):
        replacement = ast.Constant(value=node.value)
    else:
        return node

    return cast(ast.AST, replacement)
3662
3663
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent.

    Equivalence is judged by comparing textual dumps of both ASTs, ignoring
    formatting-only differences (e.g. parens Black inserts around del tuples).
    """

    def _v(node: Union[ast.AST, ast3.AST, ast27.AST], depth: int = 0) -> Iterator[str]:
        """Simple visitor generating strings to compare ASTs by content."""

        # Normalize deprecated node classes (Str/Num/...) to Constant first
        # so 3.8+ and older parsers dump identically.
        node = _fixup_ast_constants(node)

        yield f"{' ' * depth}{node.__class__.__name__}("

        for field in sorted(node._fields):
            # TypeIgnore has only one field 'lineno' which breaks this comparison
            type_ignore_classes = (ast3.TypeIgnore, ast27.TypeIgnore)
            if sys.version_info >= (3, 8):
                type_ignore_classes += (ast.TypeIgnore,)
            if isinstance(node, type_ignore_classes):
                break

            try:
                value = getattr(node, field)
            except AttributeError:
                continue

            yield f"{' ' * (depth+1)}{field}="

            if isinstance(value, list):
                for item in value:
                    # Ignore nested tuples within del statements, because we may insert
                    # parentheses and they change the AST.
                    if (
                        field == "targets"
                        and isinstance(node, (ast.Delete, ast3.Delete, ast27.Delete))
                        and isinstance(item, (ast.Tuple, ast3.Tuple, ast27.Tuple))
                    ):
                        # Dump the tuple's elements directly, skipping the
                        # Tuple wrapper itself.
                        for item in item.elts:
                            yield from _v(item, depth + 2)
                    elif isinstance(item, (ast.AST, ast3.AST, ast27.AST)):
                        yield from _v(item, depth + 2)

            elif isinstance(value, (ast.AST, ast3.AST, ast27.AST)):
                yield from _v(value, depth + 2)

            else:
                # Plain value (identifier, number, string, ...): dump its repr.
                yield f"{' ' * (depth+2)}{value!r}, # {value.__class__.__name__}"

        yield f"{' ' * depth}) # /{node.__class__.__name__}"

    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            f"cannot use --safe with this file; failed to parse source file. "
            f"AST error message: {exc}"
        )

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            f"Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(_v(src_ast))
    dst_ast_str = "\n".join(_v(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            f"INTERNAL ERROR: Black produced code that is not equivalent to "
            f"the source. "
            f"Please report a bug on https://github.com/psf/black/issues. "
            f"This diff might be helpful: {log}"
        ) from None
3739
3740
def assert_stable(src: str, dst: str, mode: FileMode) -> None:
    """Raise AssertionError if formatting `dst` again changes it (idempotence check)."""
    second_pass = format_str(dst, mode=mode)
    if second_pass == dst:
        return

    # Dump both diffs (source->first pass, first->second pass) for the report.
    log = dump_to_file(
        diff(src, dst, "source", "first pass"),
        diff(dst, second_pass, "first pass", "second pass"),
    )
    raise AssertionError(
        f"INTERNAL ERROR: Black produced different code on the second pass "
        f"of the formatter. "
        f"Please report a bug on https://github.com/psf/black/issues. "
        f"This diff might be helpful: {log}"
    ) from None
3755
3756
def dump_to_file(*output: str) -> str:
    """Dump `output` chunks to a temporary log file and return its path.

    Each chunk is newline-terminated if it isn't already (empty chunks are
    written as-is).
    """
    f = tempfile.NamedTemporaryFile(
        mode="w", prefix="blk_", suffix=".log", delete=False, encoding="utf8"
    )
    with f:
        for chunk in output:
            f.write(chunk)
            if chunk and not chunk.endswith("\n"):
                f.write("\n")
    return f.name
3767
3768
@contextmanager
def nullcontext() -> Iterator[None]:
    """A do-nothing context manager; backport of Python 3.7's `nullcontext`."""
    yield None
3774
3775
def diff(a: str, b: str, a_name: str, b_name: str) -> str:
    """Return a unified diff (5 context lines) between strings `a` and `b`."""
    import difflib

    # Split manually and re-append "\n" so a missing trailing newline still
    # produces well-formed diff lines.
    left = [f"{ln}\n" for ln in a.split("\n")]
    right = [f"{ln}\n" for ln in b.split("\n")]
    return "".join(
        difflib.unified_diff(left, right, fromfile=a_name, tofile=b_name, n=5)
    )
3785
3786
def cancel(tasks: Iterable[asyncio.Task]) -> None:
    """asyncio signal handler: report the abort to stderr, then cancel `tasks`."""
    err("Aborted!")
    for pending in tasks:
        pending.cancel()
3792
3793
def shutdown(loop: asyncio.AbstractEventLoop) -> None:
    """Cancel all pending tasks on `loop`, wait for them, and close the loop."""
    try:
        # asyncio.Task.all_tasks() was deprecated in favor of the module-level
        # asyncio.all_tasks() introduced in Python 3.7.
        if sys.version_info[:2] >= (3, 7):
            all_tasks = asyncio.all_tasks
        else:
            all_tasks = asyncio.Task.all_tasks
        # This part is borrowed from asyncio/runners.py in Python 3.7b2.
        to_cancel = [task for task in all_tasks(loop) if not task.done()]
        if not to_cancel:
            return

        for task in to_cancel:
            task.cancel()
        # Run the loop just long enough for the cancellations to be delivered;
        # return_exceptions=True collects (rather than raises) task errors.
        loop.run_until_complete(
            asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)
        )
    finally:
        # `concurrent.futures.Future` objects cannot be cancelled once they
        # are already running. There might be some when the `shutdown()` happened.
        # Silence their logger's spew about the event loop being closed.
        cf_logger = logging.getLogger("concurrent.futures")
        cf_logger.setLevel(logging.CRITICAL)
        loop.close()
3818
3819
def sub_twice(regex: Pattern[str], replacement: str, original: str) -> str:
    """Apply `regex` -> `replacement` substitution twice to `original`.

    String normalization uses this to catch overlapping matches that a single
    `re.sub` pass would miss.
    """
    once = regex.sub(replacement, original)
    return regex.sub(replacement, once)
3827
3828
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile `regex`, switching to verbose (re.X) mode if it spans lines.

    A newline in the pattern signals a human-formatted multi-line regex.
    """
    if "\n" in regex:
        return re.compile("(?x)" + regex)
    return re.compile(regex)
3837
3838
def enumerate_reversed(sequence: Sequence[T]) -> Iterator[Tuple[Index, T]]:
    """Like `reversed(enumerate(sequence))` if that were possible."""
    last_index = len(sequence) - 1
    for offset, element in enumerate(reversed(sequence)):
        yield last_index - offset, element
3845
3846
def enumerate_with_length(
    line: Line, reversed: bool = False
) -> Iterator[Tuple[Index, Leaf, int]]:
    """Yield (index, leaf, rendered length) for the leaves of `line`.

    Length includes the leaf's prefix and trailing comments. Iteration stops
    early at the first multiline string. `reversed=True` walks right-to-left.
    """
    enumerator = cast(
        Callable[[Sequence[Leaf]], Iterator[Tuple[Index, Leaf]]],
        enumerate_reversed if reversed else enumerate,
    )
    for index, leaf in enumerator(line.leaves):
        if "\n" in leaf.value:
            return  # Multiline strings, we can't continue.

        length = len(leaf.prefix) + len(leaf.value)
        length += sum(len(comment.value) for comment in line.comments_after(leaf))

        yield index, leaf, length
3867
3868
def is_line_short_enough(line: Line, *, line_length: int, line_str: str = "") -> bool:
    """Return True if `line` renders within `line_length` columns.

    Uses the provided `line_str` rendering, if any, otherwise computes a new
    one. Multiline strings and standalone comments always count as too long.
    """
    rendered = line_str or str(line).strip("\n")
    if len(rendered) > line_length:
        return False
    if "\n" in rendered:  # multiline strings
        return False
    return not line.contains_standalone_comments()
3881
3882
def can_be_split(line: Line) -> bool:
    """Return False if the line cannot be split *for sure*.

    This is not an exhaustive search but a cheap heuristic that we can use to
    avoid some unfortunate formattings (mostly around wrapping unsplittable code
    in unnecessary parentheses).
    """
    leaves = line.leaves
    if len(leaves) < 2:
        # A single leaf offers no split point.
        return False

    if leaves[0].type == token.STRING and leaves[1].type == token.DOT:
        # A method-call chain hung off a string literal, e.g. "...".join(x).
        # Scan the leaves right-to-left; `next` is the leaf to the right of
        # the one being examined.
        call_count = 0
        dot_count = 0
        next = leaves[-1]
        for leaf in leaves[-2::-1]:
            if leaf.type in OPENING_BRACKETS:
                if next.type not in CLOSING_BRACKETS:
                    # Opening bracket with real content after it: nested
                    # arguments; splitting is not guaranteed safe.
                    return False

                call_count += 1
            elif leaf.type == token.DOT:
                dot_count += 1
            elif leaf.type == token.NAME:
                if not (next.type == token.DOT or next.type in OPENING_BRACKETS):
                    # A name must be either an attribute (`.name`) or a call.
                    return False

            elif leaf.type not in CLOSING_BRACKETS:
                return False

            if dot_count > 1 and call_count > 1:
                # Long mixed attribute/call chains: refuse to split.
                return False

    return True
3917
3918
def can_omit_invisible_parens(line: Line, line_length: int) -> bool:
    """Does `line` have a shape safe to reformat without optional parens around it?

    Returns True for only a subset of potentially nice looking formattings but
    the point is to not return false positives that end up producing lines that
    are too long.
    """
    bt = line.bracket_tracker
    if not bt.delimiters:
        # Without delimiters the optional parentheses are useless.
        return True

    max_priority = bt.max_delimiter_priority()
    if bt.delimiter_count_with_priority(max_priority) > 1:
        # With more than one delimiter of a kind the optional parentheses read better.
        return False

    if max_priority == DOT_PRIORITY:
        # A single stranded method call doesn't require optional parentheses.
        return True

    assert len(line.leaves) >= 2, "Stranded delimiter"

    first = line.leaves[0]
    second = line.leaves[1]
    penultimate = line.leaves[-2]
    last = line.leaves[-1]

    # With a single delimiter, omit if the expression starts or ends with
    # a bracket.
    if first.type in OPENING_BRACKETS and second.type not in CLOSING_BRACKETS:
        # `remainder` becomes True once we've reached the closing bracket that
        # matches `first`; from then on we measure the rest of the line.
        remainder = False
        length = 4 * line.depth
        for _index, leaf, leaf_length in enumerate_with_length(line):
            if leaf.type in CLOSING_BRACKETS and leaf.opening_bracket is first:
                remainder = True
            if remainder:
                length += leaf_length
                if length > line_length:
                    break

                if leaf.type in OPENING_BRACKETS:
                    # There are brackets we can further split on.
                    remainder = False

        else:
            # checked the entire string and line length wasn't exceeded
            if len(line.leaves) == _index + 1:
                return True

        # Note: we are not returning False here because a line might have *both*
        # a leading opening bracket and a trailing closing bracket. If the
        # opening bracket doesn't match our rule, maybe the closing will.

    if (
        last.type == token.RPAR
        or last.type == token.RBRACE
        or (
            # don't use indexing for omitting optional parentheses;
            # it looks weird
            last.type == token.RSQB
            and last.parent
            and last.parent.type != syms.trailer
        )
    ):
        if penultimate.type in OPENING_BRACKETS:
            # Empty brackets don't help.
            return False

        if is_multiline_string(first):
            # Additional wrapping of a multiline string in this situation is
            # unnecessary.
            return True

        length = 4 * line.depth
        seen_other_brackets = False
        for _index, leaf, leaf_length in enumerate_with_length(line):
            length += leaf_length
            if leaf is last.opening_bracket:
                # Everything up to and including the trailing bracket's opener
                # fits, or we can split inside an earlier bracket instead.
                if seen_other_brackets or length <= line_length:
                    return True

            elif leaf.type in OPENING_BRACKETS:
                # There are brackets we can further split on.
                seen_other_brackets = True

    return False
4006
4007
def get_cache_file(mode: FileMode) -> Path:
    """Return the path of the on-disk pickle cache for formatting `mode`."""
    cache_name = f"cache.{mode.get_cache_key()}.pickle"
    return CACHE_DIR / cache_name
4010
4011
def read_cache(mode: FileMode) -> Cache:
    """Read the cache if it exists and is well formed.

    A missing or corrupt cache yields an empty dict; the subsequent
    write_cache call will recreate it.
    """
    cache_file = get_cache_file(mode)
    if not cache_file.exists():
        return {}

    try:
        with cache_file.open("rb") as fobj:
            cache: Cache = pickle.load(fobj)
    except pickle.UnpicklingError:
        return {}

    return cache
4028
4029
def get_cache_info(path: Path) -> CacheInfo:
    """Return (mtime, size) of `path`, used to detect files changed on disk."""
    st = path.stat()
    return (st.st_mtime, st.st_size)
4034
4035
def filter_cached(cache: Cache, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]:
    """Split an iterable of paths in `sources` into two sets.

    The first contains paths of files that modified on disk or are not in the
    cache. The other contains paths to non-modified files.
    """
    todo: Set[Path] = set()
    done: Set[Path] = set()
    for source in sources:
        resolved = source.resolve()
        if cache.get(resolved) == get_cache_info(resolved):
            done.add(resolved)
        else:
            todo.add(resolved)
    return todo, done
4050
4051
def write_cache(cache: Cache, sources: Iterable[Path], mode: FileMode) -> None:
    """Update the cache file atomically; failures are silently ignored (best effort)."""
    cache_file = get_cache_file(mode)
    try:
        CACHE_DIR.mkdir(parents=True, exist_ok=True)
        fresh_entries = {src.resolve(): get_cache_info(src) for src in sources}
        new_cache = {**cache, **fresh_entries}
        # Write to a sibling temp file, then atomically replace the cache.
        with tempfile.NamedTemporaryFile(dir=str(cache_file.parent), delete=False) as f:
            pickle.dump(new_cache, f, protocol=pickle.HIGHEST_PROTOCOL)
        os.replace(f.name, cache_file)
    except OSError:
        pass
4063
4064
def patch_click() -> None:
    """Make Click not crash.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    try:
        from click import core
        from click import _unicodefun  # type: ignore
    except ModuleNotFoundError:
        # Click isn't installed; nothing to patch.
        return

    for mod in (core, _unicodefun):
        if hasattr(mod, "_verify_python3_env"):
            # Neutralize the locale check so Click never raises RuntimeError.
            mod._verify_python3_env = lambda: None
4085
4086
def patched_main() -> None:
    """CLI entry point: apply platform workarounds, then run `main`."""
    freeze_support()  # needed for multiprocessing in frozen Windows executables
    patch_click()  # avoid Click's ASCII-locale RuntimeError
    main()
4091
4092
if __name__ == "__main__":
    # Module executed as a script: run the patched entry point.
    patched_main()
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,253 +1,261 b''
1 # If you want to change PREFIX, do not just edit it below. The changed
1 # If you want to change PREFIX, do not just edit it below. The changed
2 # value wont get passed on to recursive make calls. You should instead
2 # value wont get passed on to recursive make calls. You should instead
3 # override the variable on the command like:
3 # override the variable on the command like:
4 #
4 #
5 # % make PREFIX=/opt/ install
5 # % make PREFIX=/opt/ install
6
6
7 export PREFIX=/usr/local
7 export PREFIX=/usr/local
8 PYTHON?=python
8 PYTHON?=python
9 $(eval HGROOT := $(shell pwd))
9 $(eval HGROOT := $(shell pwd))
10 HGPYTHONS ?= $(HGROOT)/build/pythons
10 HGPYTHONS ?= $(HGROOT)/build/pythons
11 PURE=
11 PURE=
12 PYFILESCMD=find mercurial hgext doc -name '*.py'
12 PYFILESCMD=find mercurial hgext doc -name '*.py'
13 PYFILES:=$(shell $(PYFILESCMD))
13 PYFILES:=$(shell $(PYFILESCMD))
14 DOCFILES=mercurial/help/*.txt
14 DOCFILES=mercurial/help/*.txt
15 export LANGUAGE=C
15 export LANGUAGE=C
16 export LC_ALL=C
16 export LC_ALL=C
17 TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
17 TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
18 OSXVERSIONFLAGS ?= $(shell echo $$OSXVERSIONFLAGS)
18 OSXVERSIONFLAGS ?= $(shell echo $$OSXVERSIONFLAGS)
19 CARGO = cargo
19 CARGO = cargo
20
20
21 # Set this to e.g. "mingw32" to use a non-default compiler.
21 # Set this to e.g. "mingw32" to use a non-default compiler.
22 COMPILER=
22 COMPILER=
23
23
24 COMPILERFLAG_tmp_ =
24 COMPILERFLAG_tmp_ =
25 COMPILERFLAG_tmp_${COMPILER} ?= -c $(COMPILER)
25 COMPILERFLAG_tmp_${COMPILER} ?= -c $(COMPILER)
26 COMPILERFLAG=${COMPILERFLAG_tmp_${COMPILER}}
26 COMPILERFLAG=${COMPILERFLAG_tmp_${COMPILER}}
27
27
28 help:
28 help:
29 @echo 'Commonly used make targets:'
29 @echo 'Commonly used make targets:'
30 @echo ' all - build program and documentation'
30 @echo ' all - build program and documentation'
31 @echo ' install - install program and man pages to $$PREFIX ($(PREFIX))'
31 @echo ' install - install program and man pages to $$PREFIX ($(PREFIX))'
32 @echo ' install-home - install with setup.py install --home=$$HOME ($(HOME))'
32 @echo ' install-home - install with setup.py install --home=$$HOME ($(HOME))'
33 @echo ' local - build for inplace usage'
33 @echo ' local - build for inplace usage'
34 @echo ' tests - run all tests in the automatic test suite'
34 @echo ' tests - run all tests in the automatic test suite'
35 @echo ' test-foo - run only specified tests (e.g. test-merge1.t)'
35 @echo ' test-foo - run only specified tests (e.g. test-merge1.t)'
36 @echo ' dist - run all tests and create a source tarball in dist/'
36 @echo ' dist - run all tests and create a source tarball in dist/'
37 @echo ' clean - remove files created by other targets'
37 @echo ' clean - remove files created by other targets'
38 @echo ' (except installed files or dist source tarball)'
38 @echo ' (except installed files or dist source tarball)'
39 @echo ' update-pot - update i18n/hg.pot'
39 @echo ' update-pot - update i18n/hg.pot'
40 @echo
40 @echo
41 @echo 'Example for a system-wide installation under /usr/local:'
41 @echo 'Example for a system-wide installation under /usr/local:'
42 @echo ' make all && su -c "make install" && hg version'
42 @echo ' make all && su -c "make install" && hg version'
43 @echo
43 @echo
44 @echo 'Example for a local installation (usable in this directory):'
44 @echo 'Example for a local installation (usable in this directory):'
45 @echo ' make local && ./hg version'
45 @echo ' make local && ./hg version'
46
46
47 all: build doc
47 all: build doc
48
48
49 local:
49 local:
50 $(PYTHON) setup.py $(PURE) \
50 $(PYTHON) setup.py $(PURE) \
51 build_py -c -d . \
51 build_py -c -d . \
52 build_ext $(COMPILERFLAG) -i \
52 build_ext $(COMPILERFLAG) -i \
53 build_hgexe $(COMPILERFLAG) -i \
53 build_hgexe $(COMPILERFLAG) -i \
54 build_mo
54 build_mo
55 env HGRCPATH= $(PYTHON) hg version
55 env HGRCPATH= $(PYTHON) hg version
56
56
57 build:
57 build:
58 $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG)
58 $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG)
59
59
60 wheel:
60 wheel:
61 FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG)
61 FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG)
62
62
63 doc:
63 doc:
64 $(MAKE) -C doc
64 $(MAKE) -C doc
65
65
66 cleanbutpackages:
66 cleanbutpackages:
67 -$(PYTHON) setup.py clean --all # ignore errors from this command
67 -$(PYTHON) setup.py clean --all # ignore errors from this command
68 find contrib doc hgext hgext3rd i18n mercurial tests hgdemandimport \
68 find contrib doc hgext hgext3rd i18n mercurial tests hgdemandimport \
69 \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
69 \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
70 rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
70 rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
71 rm -f mercurial/__modulepolicy__.py
71 rm -f mercurial/__modulepolicy__.py
72 if test -d .hg; then rm -f mercurial/__version__.py; fi
72 if test -d .hg; then rm -f mercurial/__version__.py; fi
73 rm -rf build mercurial/locale
73 rm -rf build mercurial/locale
74 $(MAKE) -C doc clean
74 $(MAKE) -C doc clean
75 $(MAKE) -C contrib/chg distclean
75 $(MAKE) -C contrib/chg distclean
76 rm -rf rust/target
76 rm -rf rust/target
77 rm -f mercurial/rustext.so
77 rm -f mercurial/rustext.so
78
78
79 clean: cleanbutpackages
79 clean: cleanbutpackages
80 rm -rf packages
80 rm -rf packages
81
81
82 install: install-bin install-doc
82 install: install-bin install-doc
83
83
84 install-bin: build
84 install-bin: build
85 $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force
85 $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force
86
86
87 install-doc: doc
87 install-doc: doc
88 cd doc && $(MAKE) $(MFLAGS) install
88 cd doc && $(MAKE) $(MFLAGS) install
89
89
90 install-home: install-home-bin install-home-doc
90 install-home: install-home-bin install-home-doc
91
91
92 install-home-bin: build
92 install-home-bin: build
93 $(PYTHON) setup.py $(PURE) install --home="$(HOME)" --prefix="" --force
93 $(PYTHON) setup.py $(PURE) install --home="$(HOME)" --prefix="" --force
94
94
95 install-home-doc: doc
95 install-home-doc: doc
96 cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
96 cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
97
97
98 MANIFEST-doc:
98 MANIFEST-doc:
99 $(MAKE) -C doc MANIFEST
99 $(MAKE) -C doc MANIFEST
100
100
101 MANIFEST.in: MANIFEST-doc
101 MANIFEST.in: MANIFEST-doc
102 hg manifest | sed -e 's/^/include /' > MANIFEST.in
102 hg manifest | sed -e 's/^/include /' > MANIFEST.in
103 echo include mercurial/__version__.py >> MANIFEST.in
103 echo include mercurial/__version__.py >> MANIFEST.in
104 sed -e 's/^/include /' < doc/MANIFEST >> MANIFEST.in
104 sed -e 's/^/include /' < doc/MANIFEST >> MANIFEST.in
105
105
106 dist: tests dist-notests
106 dist: tests dist-notests
107
107
108 dist-notests: doc MANIFEST.in
108 dist-notests: doc MANIFEST.in
109 TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
109 TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
110
110
111 check: tests
111 check: tests
112
112
113 tests:
113 tests:
114 # Run Rust tests if cargo is installed
114 # Run Rust tests if cargo is installed
115 if command -v $(CARGO) >/dev/null 2>&1; then \
115 if command -v $(CARGO) >/dev/null 2>&1; then \
116 cd $(HGROOT)/rust/hg-cpython && $(CARGO) test --quiet --all; \
116 $(MAKE) rust-tests; \
117 fi
117 fi
118 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
118 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
119
119
120 test-%:
120 test-%:
121 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
121 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
122
122
123 testpy-%:
123 testpy-%:
124 @echo Looking for Python $* in $(HGPYTHONS)
124 @echo Looking for Python $* in $(HGPYTHONS)
125 [ -e $(HGPYTHONS)/$*/bin/python ] || ( \
125 [ -e $(HGPYTHONS)/$*/bin/python ] || ( \
126 cd $$(mktemp --directory --tmpdir) && \
126 cd $$(mktemp --directory --tmpdir) && \
127 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
127 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
128 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
128 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
129
129
130 rust-tests: py_feature = $(shell $(PYTHON) -c \
131 'import sys; print(["python27-bin", "python3-bin"][sys.version_info[0] >= 3])')
132 rust-tests:
133 cd $(HGROOT)/rust/hg-cpython \
134 && $(CARGO) test --quiet --all \
135 --no-default-features --features "$(py_feature)"
136
130 check-code:
137 check-code:
131 hg manifest | xargs python contrib/check-code.py
138 hg manifest | xargs python contrib/check-code.py
132
139
133 format-c:
140 format-c:
134 clang-format --style file -i \
141 clang-format --style file -i \
135 `hg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"'`
142 `hg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"'`
136
143
137 update-pot: i18n/hg.pot
144 update-pot: i18n/hg.pot
138
145
139 i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
146 i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
140 $(PYTHON) i18n/hggettext mercurial/commands.py \
147 $(PYTHON) i18n/hggettext mercurial/commands.py \
141 hgext/*.py hgext/*/__init__.py \
148 hgext/*.py hgext/*/__init__.py \
142 mercurial/fileset.py mercurial/revset.py \
149 mercurial/fileset.py mercurial/revset.py \
143 mercurial/templatefilters.py \
150 mercurial/templatefilters.py \
144 mercurial/templatefuncs.py \
151 mercurial/templatefuncs.py \
145 mercurial/templatekw.py \
152 mercurial/templatekw.py \
146 mercurial/filemerge.py \
153 mercurial/filemerge.py \
147 mercurial/hgweb/webcommands.py \
154 mercurial/hgweb/webcommands.py \
148 mercurial/util.py \
155 mercurial/util.py \
149 $(DOCFILES) > i18n/hg.pot.tmp
156 $(DOCFILES) > i18n/hg.pot.tmp
150 # All strings marked for translation in Mercurial contain
157 # All strings marked for translation in Mercurial contain
151 # ASCII characters only. But some files contain string
158 # ASCII characters only. But some files contain string
152 # literals like this '\037\213'. xgettext thinks it has to
159 # literals like this '\037\213'. xgettext thinks it has to
153 # parse them even though they are not marked for translation.
160 # parse them even though they are not marked for translation.
154 # Extracting with an explicit encoding of ISO-8859-1 will make
161 # Extracting with an explicit encoding of ISO-8859-1 will make
155 # xgettext "parse" and ignore them.
162 # xgettext "parse" and ignore them.
156 $(PYFILESCMD) | xargs \
163 $(PYFILESCMD) | xargs \
157 xgettext --package-name "Mercurial" \
164 xgettext --package-name "Mercurial" \
158 --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \
165 --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \
159 --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \
166 --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \
160 --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
167 --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
161 -d hg -p i18n -o hg.pot.tmp
168 -d hg -p i18n -o hg.pot.tmp
162 $(PYTHON) i18n/posplit i18n/hg.pot.tmp
169 $(PYTHON) i18n/posplit i18n/hg.pot.tmp
163 # The target file is not created before the last step. So it never is in
170 # The target file is not created before the last step. So it never is in
164 # an intermediate state.
171 # an intermediate state.
165 mv -f i18n/hg.pot.tmp i18n/hg.pot
172 mv -f i18n/hg.pot.tmp i18n/hg.pot
166
173
167 %.po: i18n/hg.pot
174 %.po: i18n/hg.pot
168 # work on a temporary copy for never having a half completed target
175 # work on a temporary copy for never having a half completed target
169 cp $@ $@.tmp
176 cp $@ $@.tmp
170 msgmerge --no-location --update $@.tmp $^
177 msgmerge --no-location --update $@.tmp $^
171 mv -f $@.tmp $@
178 mv -f $@.tmp $@
172
179
173 # Packaging targets
180 # Packaging targets
174
181
175 packaging_targets := \
182 packaging_targets := \
176 centos5 \
183 centos5 \
177 centos6 \
184 centos6 \
178 centos7 \
185 centos7 \
179 deb \
186 deb \
180 docker-centos5 \
187 docker-centos5 \
181 docker-centos6 \
188 docker-centos6 \
182 docker-centos7 \
189 docker-centos7 \
183 docker-debian-jessie \
190 docker-debian-jessie \
184 docker-debian-stretch \
191 docker-debian-stretch \
185 docker-fedora20 \
192 docker-fedora20 \
186 docker-fedora21 \
193 docker-fedora21 \
187 docker-fedora28 \
194 docker-fedora28 \
188 docker-fedora29 \
195 docker-fedora29 \
189 docker-ubuntu-trusty \
196 docker-ubuntu-trusty \
190 docker-ubuntu-trusty-ppa \
197 docker-ubuntu-trusty-ppa \
191 docker-ubuntu-xenial \
198 docker-ubuntu-xenial \
192 docker-ubuntu-xenial-ppa \
199 docker-ubuntu-xenial-ppa \
193 docker-ubuntu-artful \
200 docker-ubuntu-artful \
194 docker-ubuntu-artful-ppa \
201 docker-ubuntu-artful-ppa \
195 docker-ubuntu-bionic \
202 docker-ubuntu-bionic \
196 docker-ubuntu-bionic-ppa \
203 docker-ubuntu-bionic-ppa \
197 fedora20 \
204 fedora20 \
198 fedora21 \
205 fedora21 \
199 fedora28 \
206 fedora28 \
200 fedora29 \
207 fedora29 \
201 linux-wheels \
208 linux-wheels \
202 linux-wheels-x86_64 \
209 linux-wheels-x86_64 \
203 linux-wheels-i686 \
210 linux-wheels-i686 \
204 ppa
211 ppa
205
212
206 # Forward packaging targets for convenience.
213 # Forward packaging targets for convenience.
207 $(packaging_targets):
214 $(packaging_targets):
208 $(MAKE) -C contrib/packaging $@
215 $(MAKE) -C contrib/packaging $@
209
216
210 osx:
217 osx:
211 rm -rf build/mercurial
218 rm -rf build/mercurial
212 /usr/bin/python2.7 setup.py install --optimize=1 \
219 /usr/bin/python2.7 setup.py install --optimize=1 \
213 --root=build/mercurial/ --prefix=/usr/local/ \
220 --root=build/mercurial/ --prefix=/usr/local/ \
214 --install-lib=/Library/Python/2.7/site-packages/
221 --install-lib=/Library/Python/2.7/site-packages/
215 make -C doc all install DESTDIR="$(PWD)/build/mercurial/"
222 make -C doc all install DESTDIR="$(PWD)/build/mercurial/"
216 # Place a bogon .DS_Store file in the target dir so we can be
223 # Place a bogon .DS_Store file in the target dir so we can be
217 # sure it doesn't get included in the final package.
224 # sure it doesn't get included in the final package.
218 touch build/mercurial/.DS_Store
225 touch build/mercurial/.DS_Store
219 # install zsh completions - this location appears to be
226 # install zsh completions - this location appears to be
220 # searched by default as of macOS Sierra.
227 # searched by default as of macOS Sierra.
221 install -d build/mercurial/usr/local/share/zsh/site-functions/
228 install -d build/mercurial/usr/local/share/zsh/site-functions/
222 install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
229 install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
223 # install bash completions - there doesn't appear to be a
230 # install bash completions - there doesn't appear to be a
224 # place that's searched by default for bash, so we'll follow
231 # place that's searched by default for bash, so we'll follow
225 # the lead of Apple's git install and just put it in a
232 # the lead of Apple's git install and just put it in a
226 # location of our own.
233 # location of our own.
227 install -d build/mercurial/usr/local/hg/contrib/
234 install -d build/mercurial/usr/local/hg/contrib/
228 install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
235 install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
229 make -C contrib/chg \
236 make -C contrib/chg \
230 HGPATH=/usr/local/bin/hg \
237 HGPATH=/usr/local/bin/hg \
231 PYTHON=/usr/bin/python2.7 \
238 PYTHON=/usr/bin/python2.7 \
232 HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
239 HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
233 DESTDIR=../../build/mercurial \
240 DESTDIR=../../build/mercurial \
234 PREFIX=/usr/local \
241 PREFIX=/usr/local \
235 clean install
242 clean install
236 mkdir -p $${OUTPUTDIR:-dist}
243 mkdir -p $${OUTPUTDIR:-dist}
237 HGVER=$$(python contrib/genosxversion.py $(OSXVERSIONFLAGS) build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py) && \
244 HGVER=$$(python contrib/genosxversion.py $(OSXVERSIONFLAGS) build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py) && \
238 OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
245 OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
239 pkgbuild --filter \\.DS_Store --root build/mercurial/ \
246 pkgbuild --filter \\.DS_Store --root build/mercurial/ \
240 --identifier org.mercurial-scm.mercurial \
247 --identifier org.mercurial-scm.mercurial \
241 --version "$${HGVER}" \
248 --version "$${HGVER}" \
242 build/mercurial.pkg && \
249 build/mercurial.pkg && \
243 productbuild --distribution contrib/packaging/macosx/distribution.xml \
250 productbuild --distribution contrib/packaging/macosx/distribution.xml \
244 --package-path build/ \
251 --package-path build/ \
245 --version "$${HGVER}" \
252 --version "$${HGVER}" \
246 --resources contrib/packaging/macosx/ \
253 --resources contrib/packaging/macosx/ \
247 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
254 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
248
255
249 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
256 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
250 install-doc install-home install-home-bin install-home-doc \
257 install-doc install-home install-home-bin install-home-doc \
251 dist dist-notests check tests check-code format-c update-pot \
258 dist dist-notests check tests rust-tests check-code format-c \
259 update-pot \
252 $(packaging_targets) \
260 $(packaging_targets) \
253 osx
261 osx
@@ -1,183 +1,258 b''
1 ====================
1 ====================
2 Mercurial Automation
2 Mercurial Automation
3 ====================
3 ====================
4
4
5 This directory contains code and utilities for building and testing Mercurial
5 This directory contains code and utilities for building and testing Mercurial
6 on remote machines.
6 on remote machines.
7
7
8 The ``automation.py`` Script
8 The ``automation.py`` Script
9 ============================
9 ============================
10
10
11 ``automation.py`` is an executable Python script (requires Python 3.5+)
11 ``automation.py`` is an executable Python script (requires Python 3.5+)
12 that serves as a driver to common automation tasks.
12 that serves as a driver to common automation tasks.
13
13
14 When executed, the script will *bootstrap* a virtualenv in
14 When executed, the script will *bootstrap* a virtualenv in
15 ``<source-root>/build/venv-automation`` then re-execute itself using
15 ``<source-root>/build/venv-automation`` then re-execute itself using
16 that virtualenv. So there is no need for the caller to have a virtualenv
16 that virtualenv. So there is no need for the caller to have a virtualenv
17 explicitly activated. This virtualenv will be populated with various
17 explicitly activated. This virtualenv will be populated with various
18 dependencies (as defined by the ``requirements.txt`` file).
18 dependencies (as defined by the ``requirements.txt`` file).
19
19
20 To see what you can do with this script, simply run it::
20 To see what you can do with this script, simply run it::
21
21
22 $ ./automation.py
22 $ ./automation.py
23
23
24 Local State
24 Local State
25 ===========
25 ===========
26
26
27 By default, local state required to interact with remote servers is stored
27 By default, local state required to interact with remote servers is stored
28 in the ``~/.hgautomation`` directory.
28 in the ``~/.hgautomation`` directory.
29
29
30 We attempt to limit persistent state to this directory. Even when
30 We attempt to limit persistent state to this directory. Even when
31 performing tasks that may have side-effects, we try to limit those
31 performing tasks that may have side-effects, we try to limit those
32 side-effects so they don't impact the local system. e.g. when we SSH
32 side-effects so they don't impact the local system. e.g. when we SSH
33 into a remote machine, we create a temporary directory for the SSH
33 into a remote machine, we create a temporary directory for the SSH
34 config so the user's known hosts file isn't updated.
34 config so the user's known hosts file isn't updated.
35
35
36 Try Server
37 ==========
38
39 There exists a *Try Server* which allows automation to run against
40 an arbitrary Mercurial changeset and displays results via the web.
41
42 .. note::
43
44 The *Try Server* is still experimental infrastructure.
45
46 To use the *Try Server*::
47
48 $ ./automation.py try
49
50 With a custom AWS profile::
51
52 $ AWS_PROFILE=hg contrib/automation/automation.py try
53
54 By default, the ``.`` revision is submitted. **Any uncommitted changes
55 are not submitted.**
56
57 To switch which revision is used::
58
59 $ ./automation.py try -r abcdef
60
61 Access to the *Try Server* requires access to a special AWS account.
62 This account is currently run by Gregory Szorc. Here is the procedure
63 for accessing the *Try Server*:
64
65 1. Email Gregory Szorc at gregory.szorc@gmail.com and request a
66 username. This username will be stored in the public domain.
67 2. Wait for an email reply containing your temporary AWS credentials.
68 3. Log in at https://gregoryszorc-hg.signin.aws.amazon.com/console
69 and set a new, secure password.
70 4. Go to https://console.aws.amazon.com/iam/home?region=us-west-2#/security_credentials
71 5. Under ``Access keys for CLI, SDK, & API access``, click the
72 ``Create access key`` button.
73 6. See the ``AWS Integration`` section for instructions on
74 configuring your local client to use the generated credentials.
75
36 AWS Integration
76 AWS Integration
37 ===============
77 ===============
38
78
39 Various automation tasks integrate with AWS to provide access to
79 Various automation tasks integrate with AWS to provide access to
40 resources such as EC2 instances for generic compute.
80 resources such as EC2 instances for generic compute.
41
81
42 This obviously requires an AWS account and credentials to work.
82 This obviously requires an AWS account and credentials to work.
43
83
44 We use the ``boto3`` library for interacting with AWS APIs. We do not employ
84 We use the ``boto3`` library for interacting with AWS APIs. We do not employ
45 any special functionality for telling ``boto3`` where to find AWS credentials. See
85 any special functionality for telling ``boto3`` where to find AWS credentials. See
46 https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
86 https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
47 for how ``boto3`` works. Once you have configured your environment such
87 for how ``boto3`` works. Once you have configured your environment such
48 that ``boto3`` can find credentials, interaction with AWS should *just work*.
88 that ``boto3`` can find credentials, interaction with AWS should *just work*.
49
89
50 .. hint::
90 To configure ``boto3``, you can use the ``aws configure`` command to
91 write out configuration files. (The ``aws`` command is typically provided
92 by an ``awscli`` package available in your package manager, including
93 ``pip``.) Alternatively, you can write out files in ``~/.aws/`` directly.
94 e.g.::
95
96 # ~/.aws/config
97 [default]
98 region = us-west-2
51
99
52 Typically you have a ``~/.aws/credentials`` file containing AWS
100 # ~/.aws/credentials
53 credentials. If you manage multiple credentials, you can override which
101 [default]
54 *profile* to use at run-time by setting the ``AWS_PROFILE`` environment
102 aws_access_key_id = XXXX
55 variable.
103 aws_secret_access_key = YYYY
104
105 If you have multiple AWS accounts, you can name the profile something
106 different from ``default``. e.g. ``hg``. You can influence which profile
107 is used by ``boto3`` by setting the ``AWS_PROFILE`` environment variable.
108 e.g. ``AWS_PROFILE=hg``.
56
109
57 Resource Management
110 Resource Management
58 -------------------
111 -------------------
59
112
60 Depending on the task being performed, various AWS services will be accessed.
113 Depending on the task being performed, various AWS services will be accessed.
61 This of course requires AWS credentials with permissions to access these
114 This of course requires AWS credentials with permissions to access these
62 services.
115 services.
63
116
64 The following AWS services can be accessed by automation tasks:
117 The following AWS services can be accessed by automation tasks:
65
118
66 * EC2
119 * EC2
67 * IAM
120 * IAM
68 * Simple Systems Manager (SSM)
121 * Simple Systems Manager (SSM)
69
122
70 Various resources will also be created as part of performing various tasks.
123 Various resources will also be created as part of performing various tasks.
71 This also requires various permissions.
124 This also requires various permissions.
72
125
73 The following AWS resources can be created by automation tasks:
126 The following AWS resources can be created by automation tasks:
74
127
75 * EC2 key pairs
128 * EC2 key pairs
76 * EC2 security groups
129 * EC2 security groups
77 * EC2 instances
130 * EC2 instances
78 * IAM roles and instance profiles
131 * IAM roles and instance profiles
79 * SSM command invocations
132 * SSM command invocations
80
133
81 When possible, we prefix resource names with ``hg-`` so they can easily
134 When possible, we prefix resource names with ``hg-`` so they can easily
82 be identified as belonging to Mercurial.
135 be identified as belonging to Mercurial.
83
136
84 .. important::
137 .. important::
85
138
86 We currently assume that AWS accounts utilized by *us* are single
139 We currently assume that AWS accounts utilized by *us* are single
87 tenancy. Attempts to have discrete users of ``automation.py`` (including
140 tenancy. Attempts to have discrete users of ``automation.py`` (including
88 sharing credentials across machines) using the same AWS account can result
141 sharing credentials across machines) using the same AWS account can result
89 in them interfering with each other and things breaking.
142 in them interfering with each other and things breaking.
90
143
91 Cost of Operation
144 Cost of Operation
92 -----------------
145 -----------------
93
146
94 ``automation.py`` tries to be frugal with regards to utilization of remote
147 ``automation.py`` tries to be frugal with regards to utilization of remote
95 resources. Persistent remote resources are minimized in order to keep costs
148 resources. Persistent remote resources are minimized in order to keep costs
96 in check. For example, EC2 instances are often ephemeral and only live as long
149 in check. For example, EC2 instances are often ephemeral and only live as long
97 as the operation being performed.
150 as the operation being performed.
98
151
99 Under normal operation, recurring costs are limited to:
152 Under normal operation, recurring costs are limited to:
100
153
101 * Storage costs for AMI / EBS snapshots. This should be just a few pennies
154 * Storage costs for AMI / EBS snapshots. This should be just a few pennies
102 per month.
155 per month.
103
156
104 When running EC2 instances, you'll be billed accordingly. Default instance
157 When running EC2 instances, you'll be billed accordingly. Default instance
105 types vary by operation. We try to be respectful of your money when choosing
158 types vary by operation. We try to be respectful of your money when choosing
106 defaults. e.g. for Windows instances which are billed per hour, we use e.g.
159 defaults. e.g. for Windows instances which are billed per hour, we use e.g.
107 ``t3.medium`` instances, which cost ~$0.07 per hour. For operations that
160 ``t3.medium`` instances, which cost ~$0.07 per hour. For operations that
108 scale well to many CPUs like running Linux tests, we may use a more powerful
161 scale well to many CPUs like running Linux tests, we may use a more powerful
109 instance like ``c5.9xlarge``. However, since Linux instances are billed
162 instance like ``c5.9xlarge``. However, since Linux instances are billed
110 per second and the cost of running an e.g. ``c5.9xlarge`` for half the time
163 per second and the cost of running an e.g. ``c5.9xlarge`` for half the time
111 of a ``c5.4xlarge`` is roughly the same, the choice is justified.
164 of a ``c5.4xlarge`` is roughly the same, the choice is justified.
112
165
113 .. note::
166 .. note::
114
167
115 When running Windows EC2 instances, AWS bills at the full hourly cost, even
168 When running Windows EC2 instances, AWS bills at the full hourly cost, even
116 if the instance doesn't run for a full hour (per-second billing doesn't
169 if the instance doesn't run for a full hour (per-second billing doesn't
117 apply to Windows AMIs).
170 apply to Windows AMIs).
118
171
119 Managing Remote Resources
172 Managing Remote Resources
120 -------------------------
173 -------------------------
121
174
122 Occasionally, there may be an error purging a temporary resource. Or you
175 Occasionally, there may be an error purging a temporary resource. Or you
123 may wish to forcefully purge remote state. Commands can be invoked to manually
176 may wish to forcefully purge remote state. Commands can be invoked to manually
124 purge remote resources.
177 purge remote resources.
125
178
126 To terminate all EC2 instances that we manage::
179 To terminate all EC2 instances that we manage::
127
180
128 $ automation.py terminate-ec2-instances
181 $ automation.py terminate-ec2-instances
129
182
130 To purge all EC2 resources that we manage::
183 To purge all EC2 resources that we manage::
131
184
132 $ automation.py purge-ec2-resources
185 $ automation.py purge-ec2-resources
133
186
134 Remote Machine Interfaces
187 Remote Machine Interfaces
135 =========================
188 =========================
136
189
137 The code that connects to a remote machine and executes things is
190 The code that connects to a remote machine and executes things is
138 theoretically machine agnostic as long as the remote machine conforms to
191 theoretically machine agnostic as long as the remote machine conforms to
139 an *interface*. In other words, to perform actions like running tests
192 an *interface*. In other words, to perform actions like running tests
140 remotely or triggering packaging, it shouldn't matter if the remote machine
193 remotely or triggering packaging, it shouldn't matter if the remote machine
141 is an EC2 instance, a virtual machine, etc. This section attempts to document
194 is an EC2 instance, a virtual machine, etc. This section attempts to document
142 the interface that remote machines need to provide in order to be valid
195 the interface that remote machines need to provide in order to be valid
143 *targets* for remote execution. These interfaces are often not ideal nor
196 *targets* for remote execution. These interfaces are often not ideal nor
144 the most flexible. Instead, they have often evolved as the requirements of
197 the most flexible. Instead, they have often evolved as the requirements of
145 our automation code have evolved.
198 our automation code have evolved.
146
199
147 Linux
200 Linux
148 -----
201 -----
149
202
150 Remote Linux machines expose an SSH server on port 22. The SSH server
203 Remote Linux machines expose an SSH server on port 22. The SSH server
151 must allow the ``hg`` user to authenticate using the SSH key generated by
204 must allow the ``hg`` user to authenticate using the SSH key generated by
152 the automation code. The ``hg`` user should be part of the ``hg`` group
205 the automation code. The ``hg`` user should be part of the ``hg`` group
153 and it should have ``sudo`` access without password prompting.
206 and it should have ``sudo`` access without password prompting.
154
207
155 The SSH channel must support SFTP to facilitate transferring files from
208 The SSH channel must support SFTP to facilitate transferring files from
156 client to server.
209 client to server.
157
210
158 ``/bin/bash`` must be executable and point to a bash shell executable.
211 ``/bin/bash`` must be executable and point to a bash shell executable.
159
212
160 The ``/hgdev`` directory must exist and all its content owned by ``hg::hg``.
213 The ``/hgdev`` directory must exist and all its content owned by ``hg::hg``.
161
214
162 The ``/hgdev/pyenv`` directory should contain an installation of
215 The ``/hgdev/pyenv`` directory should contain an installation of
163 ``pyenv``. Various Python distributions should be installed. The exact
216 ``pyenv``. Various Python distributions should be installed. The exact
164 versions shouldn't matter. ``pyenv global`` should have been run so
217 versions shouldn't matter. ``pyenv global`` should have been run so
165 ``/hgdev/pyenv/shims/`` is populated with redirector scripts that point
218 ``/hgdev/pyenv/shims/`` is populated with redirector scripts that point
166 to the appropriate Python executable.
219 to the appropriate Python executable.
167
220
168 The ``/hgdev/venv-bootstrap`` directory must contain a virtualenv
221 The ``/hgdev/venv-bootstrap`` directory must contain a virtualenv
169 with Mercurial installed. The ``/hgdev/venv-bootstrap/bin/hg`` executable
222 with Mercurial installed. The ``/hgdev/venv-bootstrap/bin/hg`` executable
170 is referenced by various scripts and the client.
223 is referenced by various scripts and the client.
171
224
172 The ``/hgdev/src`` directory MUST contain a clone of the Mercurial
225 The ``/hgdev/src`` directory MUST contain a clone of the Mercurial
173 source code. The state of the working directory is not important.
226 source code. The state of the working directory is not important.
174
227
175 In order to run tests, the ``/hgwork`` directory will be created.
228 In order to run tests, the ``/hgwork`` directory will be created.
176 This may require running various ``mkfs.*`` executables and ``mount``
229 This may require running various ``mkfs.*`` executables and ``mount``
177 to provision a new filesystem. This will require elevated privileges
230 to provision a new filesystem. This will require elevated privileges
178 via ``sudo``.
231 via ``sudo``.
179
232
180 Various dependencies to run the Mercurial test harness are also required.
233 Various dependencies to run the Mercurial test harness are also required.
181 Documenting them is beyond the scope of this document. Various tests
234 Documenting them is beyond the scope of this document. Various tests
182 also require other optional dependencies and missing dependencies will
235 also require other optional dependencies and missing dependencies will
183 be printed by the test runner when a test is skipped.
236 be printed by the test runner when a test is skipped.
237
238 Releasing Windows Artifacts
239 ===========================
240
241 The `automation.py` script can be used to automate the release of Windows
242 artifacts::
243
244 $ ./automation.py build-all-windows-packages --revision 5.1.1
245 $ ./automation.py publish-windows-artifacts 5.1.1
246
247 The first command will launch an EC2 instance to build all Windows packages
248 and copy them into the `dist` directory relative to the repository root. The
249 second command will then attempt to upload these files to PyPI (via `twine`)
250 and to `mercurial-scm.org` (via SSH).
251
252 Uploading to PyPI requires a PyPI account with write access to the `Mercurial`
253 package. You can skip PyPI uploading by passing `--no-pypi`.
254
255 Uploading to `mercurial-scm.org` requires an SSH account on that server
256 with `windows` group membership and for the SSH key for that account to be the
257 default SSH key (e.g. `~/.ssh/id_rsa`) or in a running SSH agent. You can
258 skip `mercurial-scm.org` uploading by passing `--no-mercurial-scm-org`.
@@ -1,70 +1,74 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # automation.py - Perform tasks on remote machines
3 # automation.py - Perform tasks on remote machines
4 #
4 #
5 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
5 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 import os
10 import os
11 import pathlib
11 import pathlib
12 import subprocess
12 import subprocess
13 import sys
13 import sys
14 import venv
14 import venv
15
15
16
16
17 HERE = pathlib.Path(os.path.abspath(__file__)).parent
17 HERE = pathlib.Path(os.path.abspath(__file__)).parent
18 REQUIREMENTS_TXT = HERE / 'requirements.txt'
18 REQUIREMENTS_TXT = HERE / 'requirements.txt'
19 SOURCE_DIR = HERE.parent.parent
19 SOURCE_DIR = HERE.parent.parent
20 VENV = SOURCE_DIR / 'build' / 'venv-automation'
20 VENV = SOURCE_DIR / 'build' / 'venv-automation'
21
21
22
22
23 def bootstrap():
23 def bootstrap():
24 venv_created = not VENV.exists()
24 venv_created = not VENV.exists()
25
25
26 VENV.parent.mkdir(exist_ok=True)
26 VENV.parent.mkdir(exist_ok=True)
27
27
28 venv.create(VENV, with_pip=True)
28 venv.create(VENV, with_pip=True)
29
29
30 if os.name == 'nt':
30 if os.name == 'nt':
31 venv_bin = VENV / 'Scripts'
31 venv_bin = VENV / 'Scripts'
32 pip = venv_bin / 'pip.exe'
32 pip = venv_bin / 'pip.exe'
33 python = venv_bin / 'python.exe'
33 python = venv_bin / 'python.exe'
34 else:
34 else:
35 venv_bin = VENV / 'bin'
35 venv_bin = VENV / 'bin'
36 pip = venv_bin / 'pip'
36 pip = venv_bin / 'pip'
37 python = venv_bin / 'python'
37 python = venv_bin / 'python'
38
38
39 args = [str(pip), 'install', '-r', str(REQUIREMENTS_TXT),
39 args = [
40 '--disable-pip-version-check']
40 str(pip),
41 'install',
42 '-r',
43 str(REQUIREMENTS_TXT),
44 '--disable-pip-version-check',
45 ]
41
46
42 if not venv_created:
47 if not venv_created:
43 args.append('-q')
48 args.append('-q')
44
49
45 subprocess.run(args, check=True)
50 subprocess.run(args, check=True)
46
51
47 os.environ['HGAUTOMATION_BOOTSTRAPPED'] = '1'
52 os.environ['HGAUTOMATION_BOOTSTRAPPED'] = '1'
48 os.environ['PATH'] = '%s%s%s' % (
53 os.environ['PATH'] = '%s%s%s' % (venv_bin, os.pathsep, os.environ['PATH'])
49 venv_bin, os.pathsep, os.environ['PATH'])
50
54
51 subprocess.run([str(python), __file__] + sys.argv[1:], check=True)
55 subprocess.run([str(python), __file__] + sys.argv[1:], check=True)
52
56
53
57
54 def run():
58 def run():
55 import hgautomation.cli as cli
59 import hgautomation.cli as cli
56
60
57 # Need to strip off main Python executable.
61 # Need to strip off main Python executable.
58 cli.main()
62 cli.main()
59
63
60
64
61 if __name__ == '__main__':
65 if __name__ == '__main__':
62 try:
66 try:
63 if 'HGAUTOMATION_BOOTSTRAPPED' not in os.environ:
67 if 'HGAUTOMATION_BOOTSTRAPPED' not in os.environ:
64 bootstrap()
68 bootstrap()
65 else:
69 else:
66 run()
70 run()
67 except subprocess.CalledProcessError as e:
71 except subprocess.CalledProcessError as e:
68 sys.exit(e.returncode)
72 sys.exit(e.returncode)
69 except KeyboardInterrupt:
73 except KeyboardInterrupt:
70 sys.exit(1)
74 sys.exit(1)
@@ -1,59 +1,57 b''
1 # __init__.py - High-level automation interfaces
1 # __init__.py - High-level automation interfaces
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import pathlib
10 import pathlib
11 import secrets
11 import secrets
12
12
13 from .aws import (
13 from .aws import AWSConnection
14 AWSConnection,
15 )
16
14
17
15
18 class HGAutomation:
16 class HGAutomation:
19 """High-level interface for Mercurial automation.
17 """High-level interface for Mercurial automation.
20
18
21 Holds global state, provides access to other primitives, etc.
19 Holds global state, provides access to other primitives, etc.
22 """
20 """
23
21
24 def __init__(self, state_path: pathlib.Path):
22 def __init__(self, state_path: pathlib.Path):
25 self.state_path = state_path
23 self.state_path = state_path
26
24
27 state_path.mkdir(exist_ok=True)
25 state_path.mkdir(exist_ok=True)
28
26
29 def default_password(self):
27 def default_password(self):
30 """Obtain the default password to use for remote machines.
28 """Obtain the default password to use for remote machines.
31
29
32 A new password will be generated if one is not stored.
30 A new password will be generated if one is not stored.
33 """
31 """
34 p = self.state_path / 'default-password'
32 p = self.state_path / 'default-password'
35
33
36 try:
34 try:
37 with p.open('r', encoding='ascii') as fh:
35 with p.open('r', encoding='ascii') as fh:
38 data = fh.read().strip()
36 data = fh.read().strip()
39
37
40 if data:
38 if data:
41 return data
39 return data
42
40
43 except FileNotFoundError:
41 except FileNotFoundError:
44 pass
42 pass
45
43
46 password = secrets.token_urlsafe(24)
44 password = secrets.token_urlsafe(24)
47
45
48 with p.open('w', encoding='ascii') as fh:
46 with p.open('w', encoding='ascii') as fh:
49 fh.write(password)
47 fh.write(password)
50 fh.write('\n')
48 fh.write('\n')
51
49
52 p.chmod(0o0600)
50 p.chmod(0o0600)
53
51
54 return password
52 return password
55
53
56 def aws_connection(self, region: str, ensure_ec2_state: bool=True):
54 def aws_connection(self, region: str, ensure_ec2_state: bool = True):
57 """Obtain an AWSConnection instance bound to a specific region."""
55 """Obtain an AWSConnection instance bound to a specific region."""
58
56
59 return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
57 return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
@@ -1,1209 +1,1262 b''
1 # aws.py - Automation code for Amazon Web Services
1 # aws.py - Automation code for Amazon Web Services
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import contextlib
10 import contextlib
11 import copy
11 import copy
12 import hashlib
12 import hashlib
13 import json
13 import json
14 import os
14 import os
15 import pathlib
15 import pathlib
16 import subprocess
16 import subprocess
17 import time
17 import time
18
18
19 import boto3
19 import boto3
20 import botocore.exceptions
20 import botocore.exceptions
21
21
22 from .linux import (
22 from .linux import BOOTSTRAP_DEBIAN
23 BOOTSTRAP_DEBIAN,
24 )
25 from .ssh import (
23 from .ssh import (
26 exec_command as ssh_exec_command,
24 exec_command as ssh_exec_command,
27 wait_for_ssh,
25 wait_for_ssh,
28 )
26 )
29 from .winrm import (
27 from .winrm import (
30 run_powershell,
28 run_powershell,
31 wait_for_winrm,
29 wait_for_winrm,
32 )
30 )
33
31
34
32
35 SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent
33 SOURCE_ROOT = pathlib.Path(
34 os.path.abspath(__file__)
35 ).parent.parent.parent.parent
36
36
37 INSTALL_WINDOWS_DEPENDENCIES = (SOURCE_ROOT / 'contrib' /
37 INSTALL_WINDOWS_DEPENDENCIES = (
38 'install-windows-dependencies.ps1')
38 SOURCE_ROOT / 'contrib' / 'install-windows-dependencies.ps1'
39 )
39
40
40
41
41 INSTANCE_TYPES_WITH_STORAGE = {
42 INSTANCE_TYPES_WITH_STORAGE = {
42 'c5d',
43 'c5d',
43 'd2',
44 'd2',
44 'h1',
45 'h1',
45 'i3',
46 'i3',
46 'm5ad',
47 'm5ad',
47 'm5d',
48 'm5d',
48 'r5d',
49 'r5d',
49 'r5ad',
50 'r5ad',
50 'x1',
51 'x1',
51 'z1d',
52 'z1d',
52 }
53 }
53
54
54
55
55 AMAZON_ACCOUNT_ID = '801119661308'
56 AMAZON_ACCOUNT_ID = '801119661308'
56 DEBIAN_ACCOUNT_ID = '379101102735'
57 DEBIAN_ACCOUNT_ID = '379101102735'
58 DEBIAN_ACCOUNT_ID_2 = '136693071363'
57 UBUNTU_ACCOUNT_ID = '099720109477'
59 UBUNTU_ACCOUNT_ID = '099720109477'
58
60
59
61
60 WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-2019.07.12'
62 WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-2019.07.12'
61
63
62
64
63 KEY_PAIRS = {
65 KEY_PAIRS = {
64 'automation',
66 'automation',
65 }
67 }
66
68
67
69
68 SECURITY_GROUPS = {
70 SECURITY_GROUPS = {
69 'linux-dev-1': {
71 'linux-dev-1': {
70 'description': 'Mercurial Linux instances that perform build/test automation',
72 'description': 'Mercurial Linux instances that perform build/test automation',
71 'ingress': [
73 'ingress': [
72 {
74 {
73 'FromPort': 22,
75 'FromPort': 22,
74 'ToPort': 22,
76 'ToPort': 22,
75 'IpProtocol': 'tcp',
77 'IpProtocol': 'tcp',
76 'IpRanges': [
78 'IpRanges': [
77 {
79 {
78 'CidrIp': '0.0.0.0/0',
80 'CidrIp': '0.0.0.0/0',
79 'Description': 'SSH from entire Internet',
81 'Description': 'SSH from entire Internet',
80 },
82 },
81 ],
83 ],
82 },
84 },
83 ],
85 ],
84 },
86 },
85 'windows-dev-1': {
87 'windows-dev-1': {
86 'description': 'Mercurial Windows instances that perform build automation',
88 'description': 'Mercurial Windows instances that perform build automation',
87 'ingress': [
89 'ingress': [
88 {
90 {
89 'FromPort': 22,
91 'FromPort': 22,
90 'ToPort': 22,
92 'ToPort': 22,
91 'IpProtocol': 'tcp',
93 'IpProtocol': 'tcp',
92 'IpRanges': [
94 'IpRanges': [
93 {
95 {
94 'CidrIp': '0.0.0.0/0',
96 'CidrIp': '0.0.0.0/0',
95 'Description': 'SSH from entire Internet',
97 'Description': 'SSH from entire Internet',
96 },
98 },
97 ],
99 ],
98 },
100 },
99 {
101 {
100 'FromPort': 3389,
102 'FromPort': 3389,
101 'ToPort': 3389,
103 'ToPort': 3389,
102 'IpProtocol': 'tcp',
104 'IpProtocol': 'tcp',
103 'IpRanges': [
105 'IpRanges': [
104 {
106 {
105 'CidrIp': '0.0.0.0/0',
107 'CidrIp': '0.0.0.0/0',
106 'Description': 'RDP from entire Internet',
108 'Description': 'RDP from entire Internet',
107 },
109 },
108 ],
110 ],
109
110 },
111 },
111 {
112 {
112 'FromPort': 5985,
113 'FromPort': 5985,
113 'ToPort': 5986,
114 'ToPort': 5986,
114 'IpProtocol': 'tcp',
115 'IpProtocol': 'tcp',
115 'IpRanges': [
116 'IpRanges': [
116 {
117 {
117 'CidrIp': '0.0.0.0/0',
118 'CidrIp': '0.0.0.0/0',
118 'Description': 'PowerShell Remoting (Windows Remote Management)',
119 'Description': 'PowerShell Remoting (Windows Remote Management)',
119 },
120 },
120 ],
121 ],
121 }
122 },
122 ],
123 ],
123 },
124 },
124 }
125 }
125
126
126
127
127 IAM_ROLES = {
128 IAM_ROLES = {
128 'ephemeral-ec2-role-1': {
129 'ephemeral-ec2-role-1': {
129 'description': 'Mercurial temporary EC2 instances',
130 'description': 'Mercurial temporary EC2 instances',
130 'policy_arns': [
131 'policy_arns': [
131 'arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM',
132 'arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM',
132 ],
133 ],
133 },
134 },
134 }
135 }
135
136
136
137
137 ASSUME_ROLE_POLICY_DOCUMENT = '''
138 ASSUME_ROLE_POLICY_DOCUMENT = '''
138 {
139 {
139 "Version": "2012-10-17",
140 "Version": "2012-10-17",
140 "Statement": [
141 "Statement": [
141 {
142 {
142 "Effect": "Allow",
143 "Effect": "Allow",
143 "Principal": {
144 "Principal": {
144 "Service": "ec2.amazonaws.com"
145 "Service": "ec2.amazonaws.com"
145 },
146 },
146 "Action": "sts:AssumeRole"
147 "Action": "sts:AssumeRole"
147 }
148 }
148 ]
149 ]
149 }
150 }
150 '''.strip()
151 '''.strip()
151
152
152
153
153 IAM_INSTANCE_PROFILES = {
154 IAM_INSTANCE_PROFILES = {
154 'ephemeral-ec2-1': {
155 'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
155 'roles': [
156 'ephemeral-ec2-role-1',
157 ],
158 }
159 }
156 }
160
157
161
158
162 # User Data for Windows EC2 instance. Mainly used to set the password
159 # User Data for Windows EC2 instance. Mainly used to set the password
163 # and configure WinRM.
160 # and configure WinRM.
164 # Inspired by the User Data script used by Packer
161 # Inspired by the User Data script used by Packer
165 # (from https://www.packer.io/intro/getting-started/build-image.html).
162 # (from https://www.packer.io/intro/getting-started/build-image.html).
166 WINDOWS_USER_DATA = r'''
163 WINDOWS_USER_DATA = r'''
167 <powershell>
164 <powershell>
168
165
169 # TODO enable this once we figure out what is failing.
166 # TODO enable this once we figure out what is failing.
170 #$ErrorActionPreference = "stop"
167 #$ErrorActionPreference = "stop"
171
168
172 # Set administrator password
169 # Set administrator password
173 net user Administrator "%s"
170 net user Administrator "%s"
174 wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE
171 wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE
175
172
176 # First, make sure WinRM can't be connected to
173 # First, make sure WinRM can't be connected to
177 netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block
174 netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block
178
175
179 # Delete any existing WinRM listeners
176 # Delete any existing WinRM listeners
180 winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null
177 winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null
181 winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null
178 winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null
182
179
183 # Create a new WinRM listener and configure
180 # Create a new WinRM listener and configure
184 winrm create winrm/config/listener?Address=*+Transport=HTTP
181 winrm create winrm/config/listener?Address=*+Transport=HTTP
185 winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}'
182 winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}'
186 winrm set winrm/config '@{MaxTimeoutms="7200000"}'
183 winrm set winrm/config '@{MaxTimeoutms="7200000"}'
187 winrm set winrm/config/service '@{AllowUnencrypted="true"}'
184 winrm set winrm/config/service '@{AllowUnencrypted="true"}'
188 winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}'
185 winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}'
189 winrm set winrm/config/service/auth '@{Basic="true"}'
186 winrm set winrm/config/service/auth '@{Basic="true"}'
190 winrm set winrm/config/client/auth '@{Basic="true"}'
187 winrm set winrm/config/client/auth '@{Basic="true"}'
191
188
192 # Configure UAC to allow privilege elevation in remote shells
189 # Configure UAC to allow privilege elevation in remote shells
193 $Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System'
190 $Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System'
194 $Setting = 'LocalAccountTokenFilterPolicy'
191 $Setting = 'LocalAccountTokenFilterPolicy'
195 Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force
192 Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force
196
193
197 # Configure and restart the WinRM Service; Enable the required firewall exception
194 # Configure and restart the WinRM Service; Enable the required firewall exception
198 Stop-Service -Name WinRM
195 Stop-Service -Name WinRM
199 Set-Service -Name WinRM -StartupType Automatic
196 Set-Service -Name WinRM -StartupType Automatic
200 netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any
197 netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any
201 Start-Service -Name WinRM
198 Start-Service -Name WinRM
202
199
203 # Disable firewall on private network interfaces so prompts don't appear.
200 # Disable firewall on private network interfaces so prompts don't appear.
204 Set-NetFirewallProfile -Name private -Enabled false
201 Set-NetFirewallProfile -Name private -Enabled false
205 </powershell>
202 </powershell>
206 '''.lstrip()
203 '''.lstrip()
207
204
208
205
209 WINDOWS_BOOTSTRAP_POWERSHELL = '''
206 WINDOWS_BOOTSTRAP_POWERSHELL = '''
210 Write-Output "installing PowerShell dependencies"
207 Write-Output "installing PowerShell dependencies"
211 Install-PackageProvider -Name NuGet -MinimumVersion 2.8.5.201 -Force
208 Install-PackageProvider -Name NuGet -MinimumVersion 2.8.5.201 -Force
212 Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
209 Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
213 Install-Module -Name OpenSSHUtils -RequiredVersion 0.0.2.0
210 Install-Module -Name OpenSSHUtils -RequiredVersion 0.0.2.0
214
211
215 Write-Output "installing OpenSSL server"
212 Write-Output "installing OpenSSL server"
216 Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0
213 Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0
217 # Various tools will attempt to use older versions of .NET. So we enable
214 # Various tools will attempt to use older versions of .NET. So we enable
218 # the feature that provides them so it doesn't have to be auto-enabled
215 # the feature that provides them so it doesn't have to be auto-enabled
219 # later.
216 # later.
220 Write-Output "enabling .NET Framework feature"
217 Write-Output "enabling .NET Framework feature"
221 Install-WindowsFeature -Name Net-Framework-Core
218 Install-WindowsFeature -Name Net-Framework-Core
222 '''
219 '''
223
220
224
221
225 class AWSConnection:
222 class AWSConnection:
226 """Manages the state of a connection with AWS."""
223 """Manages the state of a connection with AWS."""
227
224
228 def __init__(self, automation, region: str, ensure_ec2_state: bool=True):
225 def __init__(self, automation, region: str, ensure_ec2_state: bool = True):
229 self.automation = automation
226 self.automation = automation
230 self.local_state_path = automation.state_path
227 self.local_state_path = automation.state_path
231
228
232 self.prefix = 'hg-'
229 self.prefix = 'hg-'
233
230
234 self.session = boto3.session.Session(region_name=region)
231 self.session = boto3.session.Session(region_name=region)
235 self.ec2client = self.session.client('ec2')
232 self.ec2client = self.session.client('ec2')
236 self.ec2resource = self.session.resource('ec2')
233 self.ec2resource = self.session.resource('ec2')
237 self.iamclient = self.session.client('iam')
234 self.iamclient = self.session.client('iam')
238 self.iamresource = self.session.resource('iam')
235 self.iamresource = self.session.resource('iam')
239 self.security_groups = {}
236 self.security_groups = {}
240
237
241 if ensure_ec2_state:
238 if ensure_ec2_state:
242 ensure_key_pairs(automation.state_path, self.ec2resource)
239 ensure_key_pairs(automation.state_path, self.ec2resource)
243 self.security_groups = ensure_security_groups(self.ec2resource)
240 self.security_groups = ensure_security_groups(self.ec2resource)
244 ensure_iam_state(self.iamclient, self.iamresource)
241 ensure_iam_state(self.iamclient, self.iamresource)
245
242
246 def key_pair_path_private(self, name):
243 def key_pair_path_private(self, name):
247 """Path to a key pair private key file."""
244 """Path to a key pair private key file."""
248 return self.local_state_path / 'keys' / ('keypair-%s' % name)
245 return self.local_state_path / 'keys' / ('keypair-%s' % name)
249
246
250 def key_pair_path_public(self, name):
247 def key_pair_path_public(self, name):
251 return self.local_state_path / 'keys' / ('keypair-%s.pub' % name)
248 return self.local_state_path / 'keys' / ('keypair-%s.pub' % name)
252
249
253
250
254 def rsa_key_fingerprint(p: pathlib.Path):
251 def rsa_key_fingerprint(p: pathlib.Path):
255 """Compute the fingerprint of an RSA private key."""
252 """Compute the fingerprint of an RSA private key."""
256
253
257 # TODO use rsa package.
254 # TODO use rsa package.
258 res = subprocess.run(
255 res = subprocess.run(
259 ['openssl', 'pkcs8', '-in', str(p), '-nocrypt', '-topk8',
256 [
260 '-outform', 'DER'],
257 'openssl',
258 'pkcs8',
259 '-in',
260 str(p),
261 '-nocrypt',
262 '-topk8',
263 '-outform',
264 'DER',
265 ],
261 capture_output=True,
266 capture_output=True,
262 check=True)
267 check=True,
268 )
263
269
264 sha1 = hashlib.sha1(res.stdout).hexdigest()
270 sha1 = hashlib.sha1(res.stdout).hexdigest()
265 return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2]))
271 return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2]))
266
272
267
273
268 def ensure_key_pairs(state_path: pathlib.Path, ec2resource, prefix='hg-'):
274 def ensure_key_pairs(state_path: pathlib.Path, ec2resource, prefix='hg-'):
269 remote_existing = {}
275 remote_existing = {}
270
276
271 for kpi in ec2resource.key_pairs.all():
277 for kpi in ec2resource.key_pairs.all():
272 if kpi.name.startswith(prefix):
278 if kpi.name.startswith(prefix):
273 remote_existing[kpi.name[len(prefix):]] = kpi.key_fingerprint
279 remote_existing[kpi.name[len(prefix) :]] = kpi.key_fingerprint
274
280
275 # Validate that we have these keys locally.
281 # Validate that we have these keys locally.
276 key_path = state_path / 'keys'
282 key_path = state_path / 'keys'
277 key_path.mkdir(exist_ok=True, mode=0o700)
283 key_path.mkdir(exist_ok=True, mode=0o700)
278
284
279 def remove_remote(name):
285 def remove_remote(name):
280 print('deleting key pair %s' % name)
286 print('deleting key pair %s' % name)
281 key = ec2resource.KeyPair(name)
287 key = ec2resource.KeyPair(name)
282 key.delete()
288 key.delete()
283
289
284 def remove_local(name):
290 def remove_local(name):
285 pub_full = key_path / ('keypair-%s.pub' % name)
291 pub_full = key_path / ('keypair-%s.pub' % name)
286 priv_full = key_path / ('keypair-%s' % name)
292 priv_full = key_path / ('keypair-%s' % name)
287
293
288 print('removing %s' % pub_full)
294 print('removing %s' % pub_full)
289 pub_full.unlink()
295 pub_full.unlink()
290 print('removing %s' % priv_full)
296 print('removing %s' % priv_full)
291 priv_full.unlink()
297 priv_full.unlink()
292
298
293 local_existing = {}
299 local_existing = {}
294
300
295 for f in sorted(os.listdir(key_path)):
301 for f in sorted(os.listdir(key_path)):
296 if not f.startswith('keypair-') or not f.endswith('.pub'):
302 if not f.startswith('keypair-') or not f.endswith('.pub'):
297 continue
303 continue
298
304
299 name = f[len('keypair-'):-len('.pub')]
305 name = f[len('keypair-') : -len('.pub')]
300
306
301 pub_full = key_path / f
307 pub_full = key_path / f
302 priv_full = key_path / ('keypair-%s' % name)
308 priv_full = key_path / ('keypair-%s' % name)
303
309
304 with open(pub_full, 'r', encoding='ascii') as fh:
310 with open(pub_full, 'r', encoding='ascii') as fh:
305 data = fh.read()
311 data = fh.read()
306
312
307 if not data.startswith('ssh-rsa '):
313 if not data.startswith('ssh-rsa '):
308 print('unexpected format for key pair file: %s; removing' %
314 print(
309 pub_full)
315 'unexpected format for key pair file: %s; removing' % pub_full
316 )
310 pub_full.unlink()
317 pub_full.unlink()
311 priv_full.unlink()
318 priv_full.unlink()
312 continue
319 continue
313
320
314 local_existing[name] = rsa_key_fingerprint(priv_full)
321 local_existing[name] = rsa_key_fingerprint(priv_full)
315
322
316 for name in sorted(set(remote_existing) | set(local_existing)):
323 for name in sorted(set(remote_existing) | set(local_existing)):
317 if name not in local_existing:
324 if name not in local_existing:
318 actual = '%s%s' % (prefix, name)
325 actual = '%s%s' % (prefix, name)
319 print('remote key %s does not exist locally' % name)
326 print('remote key %s does not exist locally' % name)
320 remove_remote(actual)
327 remove_remote(actual)
321 del remote_existing[name]
328 del remote_existing[name]
322
329
323 elif name not in remote_existing:
330 elif name not in remote_existing:
324 print('local key %s does not exist remotely' % name)
331 print('local key %s does not exist remotely' % name)
325 remove_local(name)
332 remove_local(name)
326 del local_existing[name]
333 del local_existing[name]
327
334
328 elif remote_existing[name] != local_existing[name]:
335 elif remote_existing[name] != local_existing[name]:
329 print('key fingerprint mismatch for %s; '
336 print(
330 'removing from local and remote' % name)
337 'key fingerprint mismatch for %s; '
338 'removing from local and remote' % name
339 )
331 remove_local(name)
340 remove_local(name)
332 remove_remote('%s%s' % (prefix, name))
341 remove_remote('%s%s' % (prefix, name))
333 del local_existing[name]
342 del local_existing[name]
334 del remote_existing[name]
343 del remote_existing[name]
335
344
336 missing = KEY_PAIRS - set(remote_existing)
345 missing = KEY_PAIRS - set(remote_existing)
337
346
338 for name in sorted(missing):
347 for name in sorted(missing):
339 actual = '%s%s' % (prefix, name)
348 actual = '%s%s' % (prefix, name)
340 print('creating key pair %s' % actual)
349 print('creating key pair %s' % actual)
341
350
342 priv_full = key_path / ('keypair-%s' % name)
351 priv_full = key_path / ('keypair-%s' % name)
343 pub_full = key_path / ('keypair-%s.pub' % name)
352 pub_full = key_path / ('keypair-%s.pub' % name)
344
353
345 kp = ec2resource.create_key_pair(KeyName=actual)
354 kp = ec2resource.create_key_pair(KeyName=actual)
346
355
347 with priv_full.open('w', encoding='ascii') as fh:
356 with priv_full.open('w', encoding='ascii') as fh:
348 fh.write(kp.key_material)
357 fh.write(kp.key_material)
349 fh.write('\n')
358 fh.write('\n')
350
359
351 priv_full.chmod(0o0600)
360 priv_full.chmod(0o0600)
352
361
353 # SSH public key can be extracted via `ssh-keygen`.
362 # SSH public key can be extracted via `ssh-keygen`.
354 with pub_full.open('w', encoding='ascii') as fh:
363 with pub_full.open('w', encoding='ascii') as fh:
355 subprocess.run(
364 subprocess.run(
356 ['ssh-keygen', '-y', '-f', str(priv_full)],
365 ['ssh-keygen', '-y', '-f', str(priv_full)],
357 stdout=fh,
366 stdout=fh,
358 check=True)
367 check=True,
368 )
359
369
360 pub_full.chmod(0o0600)
370 pub_full.chmod(0o0600)
361
371
362
372
363 def delete_instance_profile(profile):
373 def delete_instance_profile(profile):
364 for role in profile.roles:
374 for role in profile.roles:
365 print('removing role %s from instance profile %s' % (role.name,
375 print(
366 profile.name))
376 'removing role %s from instance profile %s'
377 % (role.name, profile.name)
378 )
367 profile.remove_role(RoleName=role.name)
379 profile.remove_role(RoleName=role.name)
368
380
369 print('deleting instance profile %s' % profile.name)
381 print('deleting instance profile %s' % profile.name)
370 profile.delete()
382 profile.delete()
371
383
372
384
373 def ensure_iam_state(iamclient, iamresource, prefix='hg-'):
385 def ensure_iam_state(iamclient, iamresource, prefix='hg-'):
374 """Ensure IAM state is in sync with our canonical definition."""
386 """Ensure IAM state is in sync with our canonical definition."""
375
387
376 remote_profiles = {}
388 remote_profiles = {}
377
389
378 for profile in iamresource.instance_profiles.all():
390 for profile in iamresource.instance_profiles.all():
379 if profile.name.startswith(prefix):
391 if profile.name.startswith(prefix):
380 remote_profiles[profile.name[len(prefix):]] = profile
392 remote_profiles[profile.name[len(prefix) :]] = profile
381
393
382 for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)):
394 for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)):
383 delete_instance_profile(remote_profiles[name])
395 delete_instance_profile(remote_profiles[name])
384 del remote_profiles[name]
396 del remote_profiles[name]
385
397
386 remote_roles = {}
398 remote_roles = {}
387
399
388 for role in iamresource.roles.all():
400 for role in iamresource.roles.all():
389 if role.name.startswith(prefix):
401 if role.name.startswith(prefix):
390 remote_roles[role.name[len(prefix):]] = role
402 remote_roles[role.name[len(prefix) :]] = role
391
403
392 for name in sorted(set(remote_roles) - set(IAM_ROLES)):
404 for name in sorted(set(remote_roles) - set(IAM_ROLES)):
393 role = remote_roles[name]
405 role = remote_roles[name]
394
406
395 print('removing role %s' % role.name)
407 print('removing role %s' % role.name)
396 role.delete()
408 role.delete()
397 del remote_roles[name]
409 del remote_roles[name]
398
410
399 # We've purged remote state that doesn't belong. Create missing
411 # We've purged remote state that doesn't belong. Create missing
400 # instance profiles and roles.
412 # instance profiles and roles.
401 for name in sorted(set(IAM_INSTANCE_PROFILES) - set(remote_profiles)):
413 for name in sorted(set(IAM_INSTANCE_PROFILES) - set(remote_profiles)):
402 actual = '%s%s' % (prefix, name)
414 actual = '%s%s' % (prefix, name)
403 print('creating IAM instance profile %s' % actual)
415 print('creating IAM instance profile %s' % actual)
404
416
405 profile = iamresource.create_instance_profile(
417 profile = iamresource.create_instance_profile(
406 InstanceProfileName=actual)
418 InstanceProfileName=actual
419 )
407 remote_profiles[name] = profile
420 remote_profiles[name] = profile
408
421
409 waiter = iamclient.get_waiter('instance_profile_exists')
422 waiter = iamclient.get_waiter('instance_profile_exists')
410 waiter.wait(InstanceProfileName=actual)
423 waiter.wait(InstanceProfileName=actual)
411 print('IAM instance profile %s is available' % actual)
424 print('IAM instance profile %s is available' % actual)
412
425
413 for name in sorted(set(IAM_ROLES) - set(remote_roles)):
426 for name in sorted(set(IAM_ROLES) - set(remote_roles)):
414 entry = IAM_ROLES[name]
427 entry = IAM_ROLES[name]
415
428
416 actual = '%s%s' % (prefix, name)
429 actual = '%s%s' % (prefix, name)
417 print('creating IAM role %s' % actual)
430 print('creating IAM role %s' % actual)
418
431
419 role = iamresource.create_role(
432 role = iamresource.create_role(
420 RoleName=actual,
433 RoleName=actual,
421 Description=entry['description'],
434 Description=entry['description'],
422 AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT,
435 AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT,
423 )
436 )
424
437
425 waiter = iamclient.get_waiter('role_exists')
438 waiter = iamclient.get_waiter('role_exists')
426 waiter.wait(RoleName=actual)
439 waiter.wait(RoleName=actual)
427 print('IAM role %s is available' % actual)
440 print('IAM role %s is available' % actual)
428
441
429 remote_roles[name] = role
442 remote_roles[name] = role
430
443
431 for arn in entry['policy_arns']:
444 for arn in entry['policy_arns']:
432 print('attaching policy %s to %s' % (arn, role.name))
445 print('attaching policy %s to %s' % (arn, role.name))
433 role.attach_policy(PolicyArn=arn)
446 role.attach_policy(PolicyArn=arn)
434
447
435 # Now reconcile state of profiles.
448 # Now reconcile state of profiles.
436 for name, meta in sorted(IAM_INSTANCE_PROFILES.items()):
449 for name, meta in sorted(IAM_INSTANCE_PROFILES.items()):
437 profile = remote_profiles[name]
450 profile = remote_profiles[name]
438 wanted = {'%s%s' % (prefix, role) for role in meta['roles']}
451 wanted = {'%s%s' % (prefix, role) for role in meta['roles']}
439 have = {role.name for role in profile.roles}
452 have = {role.name for role in profile.roles}
440
453
441 for role in sorted(have - wanted):
454 for role in sorted(have - wanted):
442 print('removing role %s from %s' % (role, profile.name))
455 print('removing role %s from %s' % (role, profile.name))
443 profile.remove_role(RoleName=role)
456 profile.remove_role(RoleName=role)
444
457
445 for role in sorted(wanted - have):
458 for role in sorted(wanted - have):
446 print('adding role %s to %s' % (role, profile.name))
459 print('adding role %s to %s' % (role, profile.name))
447 profile.add_role(RoleName=role)
460 profile.add_role(RoleName=role)
448
461
449
462
450 def find_image(ec2resource, owner_id, name):
463 def find_image(ec2resource, owner_id, name):
451 """Find an AMI by its owner ID and name."""
464 """Find an AMI by its owner ID and name."""
452
465
453 images = ec2resource.images.filter(
466 images = ec2resource.images.filter(
454 Filters=[
467 Filters=[
455 {
468 {'Name': 'owner-id', 'Values': [owner_id],},
456 'Name': 'owner-id',
469 {'Name': 'state', 'Values': ['available'],},
457 'Values': [owner_id],
470 {'Name': 'image-type', 'Values': ['machine'],},
458 },
471 {'Name': 'name', 'Values': [name],},
459 {
472 ]
460 'Name': 'state',
473 )
461 'Values': ['available'],
462 },
463 {
464 'Name': 'image-type',
465 'Values': ['machine'],
466 },
467 {
468 'Name': 'name',
469 'Values': [name],
470 },
471 ])
472
474
473 for image in images:
475 for image in images:
474 return image
476 return image
475
477
476 raise Exception('unable to find image for %s' % name)
478 raise Exception('unable to find image for %s' % name)
477
479
478
480
479 def ensure_security_groups(ec2resource, prefix='hg-'):
481 def ensure_security_groups(ec2resource, prefix='hg-'):
480 """Ensure all necessary Mercurial security groups are present.
482 """Ensure all necessary Mercurial security groups are present.
481
483
482 All security groups are prefixed with ``hg-`` by default. Any security
484 All security groups are prefixed with ``hg-`` by default. Any security
483 groups having this prefix but aren't in our list are deleted.
485 groups having this prefix but aren't in our list are deleted.
484 """
486 """
485 existing = {}
487 existing = {}
486
488
487 for group in ec2resource.security_groups.all():
489 for group in ec2resource.security_groups.all():
488 if group.group_name.startswith(prefix):
490 if group.group_name.startswith(prefix):
489 existing[group.group_name[len(prefix):]] = group
491 existing[group.group_name[len(prefix) :]] = group
490
492
491 purge = set(existing) - set(SECURITY_GROUPS)
493 purge = set(existing) - set(SECURITY_GROUPS)
492
494
493 for name in sorted(purge):
495 for name in sorted(purge):
494 group = existing[name]
496 group = existing[name]
495 print('removing legacy security group: %s' % group.group_name)
497 print('removing legacy security group: %s' % group.group_name)
496 group.delete()
498 group.delete()
497
499
498 security_groups = {}
500 security_groups = {}
499
501
500 for name, group in sorted(SECURITY_GROUPS.items()):
502 for name, group in sorted(SECURITY_GROUPS.items()):
501 if name in existing:
503 if name in existing:
502 security_groups[name] = existing[name]
504 security_groups[name] = existing[name]
503 continue
505 continue
504
506
505 actual = '%s%s' % (prefix, name)
507 actual = '%s%s' % (prefix, name)
506 print('adding security group %s' % actual)
508 print('adding security group %s' % actual)
507
509
508 group_res = ec2resource.create_security_group(
510 group_res = ec2resource.create_security_group(
509 Description=group['description'],
511 Description=group['description'], GroupName=actual,
510 GroupName=actual,
511 )
512 )
512
513
513 group_res.authorize_ingress(
514 group_res.authorize_ingress(IpPermissions=group['ingress'],)
514 IpPermissions=group['ingress'],
515 )
516
515
517 security_groups[name] = group_res
516 security_groups[name] = group_res
518
517
519 return security_groups
518 return security_groups
520
519
521
520
522 def terminate_ec2_instances(ec2resource, prefix='hg-'):
521 def terminate_ec2_instances(ec2resource, prefix='hg-'):
523 """Terminate all EC2 instances managed by us."""
522 """Terminate all EC2 instances managed by us."""
524 waiting = []
523 waiting = []
525
524
526 for instance in ec2resource.instances.all():
525 for instance in ec2resource.instances.all():
527 if instance.state['Name'] == 'terminated':
526 if instance.state['Name'] == 'terminated':
528 continue
527 continue
529
528
530 for tag in instance.tags or []:
529 for tag in instance.tags or []:
531 if tag['Key'] == 'Name' and tag['Value'].startswith(prefix):
530 if tag['Key'] == 'Name' and tag['Value'].startswith(prefix):
532 print('terminating %s' % instance.id)
531 print('terminating %s' % instance.id)
533 instance.terminate()
532 instance.terminate()
534 waiting.append(instance)
533 waiting.append(instance)
535
534
536 for instance in waiting:
535 for instance in waiting:
537 instance.wait_until_terminated()
536 instance.wait_until_terminated()
538
537
539
538
540 def remove_resources(c, prefix='hg-'):
539 def remove_resources(c, prefix='hg-'):
541 """Purge all of our resources in this EC2 region."""
540 """Purge all of our resources in this EC2 region."""
542 ec2resource = c.ec2resource
541 ec2resource = c.ec2resource
543 iamresource = c.iamresource
542 iamresource = c.iamresource
544
543
545 terminate_ec2_instances(ec2resource, prefix=prefix)
544 terminate_ec2_instances(ec2resource, prefix=prefix)
546
545
547 for image in ec2resource.images.filter(Owners=['self']):
546 for image in ec2resource.images.filter(Owners=['self']):
548 if image.name.startswith(prefix):
547 if image.name.startswith(prefix):
549 remove_ami(ec2resource, image)
548 remove_ami(ec2resource, image)
550
549
551 for group in ec2resource.security_groups.all():
550 for group in ec2resource.security_groups.all():
552 if group.group_name.startswith(prefix):
551 if group.group_name.startswith(prefix):
553 print('removing security group %s' % group.group_name)
552 print('removing security group %s' % group.group_name)
554 group.delete()
553 group.delete()
555
554
556 for profile in iamresource.instance_profiles.all():
555 for profile in iamresource.instance_profiles.all():
557 if profile.name.startswith(prefix):
556 if profile.name.startswith(prefix):
558 delete_instance_profile(profile)
557 delete_instance_profile(profile)
559
558
560 for role in iamresource.roles.all():
559 for role in iamresource.roles.all():
561 if role.name.startswith(prefix):
560 if role.name.startswith(prefix):
562 for p in role.attached_policies.all():
561 for p in role.attached_policies.all():
563 print('detaching policy %s from %s' % (p.arn, role.name))
562 print('detaching policy %s from %s' % (p.arn, role.name))
564 role.detach_policy(PolicyArn=p.arn)
563 role.detach_policy(PolicyArn=p.arn)
565
564
566 print('removing role %s' % role.name)
565 print('removing role %s' % role.name)
567 role.delete()
566 role.delete()
568
567
569
568
570 def wait_for_ip_addresses(instances):
569 def wait_for_ip_addresses(instances):
571 """Wait for the public IP addresses of an iterable of instances."""
570 """Wait for the public IP addresses of an iterable of instances."""
572 for instance in instances:
571 for instance in instances:
573 while True:
572 while True:
574 if not instance.public_ip_address:
573 if not instance.public_ip_address:
575 time.sleep(2)
574 time.sleep(2)
576 instance.reload()
575 instance.reload()
577 continue
576 continue
578
577
579 print('public IP address for %s: %s' % (
578 print(
580 instance.id, instance.public_ip_address))
579 'public IP address for %s: %s'
580 % (instance.id, instance.public_ip_address)
581 )
581 break
582 break
582
583
583
584
584 def remove_ami(ec2resource, image):
585 def remove_ami(ec2resource, image):
585 """Remove an AMI and its underlying snapshots."""
586 """Remove an AMI and its underlying snapshots."""
586 snapshots = []
587 snapshots = []
587
588
588 for device in image.block_device_mappings:
589 for device in image.block_device_mappings:
589 if 'Ebs' in device:
590 if 'Ebs' in device:
590 snapshots.append(ec2resource.Snapshot(device['Ebs']['SnapshotId']))
591 snapshots.append(ec2resource.Snapshot(device['Ebs']['SnapshotId']))
591
592
592 print('deregistering %s' % image.id)
593 print('deregistering %s' % image.id)
593 image.deregister()
594 image.deregister()
594
595
595 for snapshot in snapshots:
596 for snapshot in snapshots:
596 print('deleting snapshot %s' % snapshot.id)
597 print('deleting snapshot %s' % snapshot.id)
597 snapshot.delete()
598 snapshot.delete()
598
599
599
600
600 def wait_for_ssm(ssmclient, instances):
601 def wait_for_ssm(ssmclient, instances):
601 """Wait for SSM to come online for an iterable of instance IDs."""
602 """Wait for SSM to come online for an iterable of instance IDs."""
602 while True:
603 while True:
603 res = ssmclient.describe_instance_information(
604 res = ssmclient.describe_instance_information(
604 Filters=[
605 Filters=[
605 {
606 {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
606 'Key': 'InstanceIds',
607 'Values': [i.id for i in instances],
608 },
609 ],
607 ],
610 )
608 )
611
609
612 available = len(res['InstanceInformationList'])
610 available = len(res['InstanceInformationList'])
613 wanted = len(instances)
611 wanted = len(instances)
614
612
615 print('%d/%d instances available in SSM' % (available, wanted))
613 print('%d/%d instances available in SSM' % (available, wanted))
616
614
617 if available == wanted:
615 if available == wanted:
618 return
616 return
619
617
620 time.sleep(2)
618 time.sleep(2)
621
619
622
620
623 def run_ssm_command(ssmclient, instances, document_name, parameters):
621 def run_ssm_command(ssmclient, instances, document_name, parameters):
624 """Run a PowerShell script on an EC2 instance."""
622 """Run a PowerShell script on an EC2 instance."""
625
623
626 res = ssmclient.send_command(
624 res = ssmclient.send_command(
627 InstanceIds=[i.id for i in instances],
625 InstanceIds=[i.id for i in instances],
628 DocumentName=document_name,
626 DocumentName=document_name,
629 Parameters=parameters,
627 Parameters=parameters,
630 CloudWatchOutputConfig={
628 CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
631 'CloudWatchOutputEnabled': True,
632 },
633 )
629 )
634
630
635 command_id = res['Command']['CommandId']
631 command_id = res['Command']['CommandId']
636
632
637 for instance in instances:
633 for instance in instances:
638 while True:
634 while True:
639 try:
635 try:
640 res = ssmclient.get_command_invocation(
636 res = ssmclient.get_command_invocation(
641 CommandId=command_id,
637 CommandId=command_id, InstanceId=instance.id,
642 InstanceId=instance.id,
643 )
638 )
644 except botocore.exceptions.ClientError as e:
639 except botocore.exceptions.ClientError as e:
645 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
640 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
646 print('could not find SSM command invocation; waiting')
641 print('could not find SSM command invocation; waiting')
647 time.sleep(1)
642 time.sleep(1)
648 continue
643 continue
649 else:
644 else:
650 raise
645 raise
651
646
652 if res['Status'] == 'Success':
647 if res['Status'] == 'Success':
653 break
648 break
654 elif res['Status'] in ('Pending', 'InProgress', 'Delayed'):
649 elif res['Status'] in ('Pending', 'InProgress', 'Delayed'):
655 time.sleep(2)
650 time.sleep(2)
656 else:
651 else:
657 raise Exception('command failed on %s: %s' % (
652 raise Exception(
658 instance.id, res['Status']))
653 'command failed on %s: %s' % (instance.id, res['Status'])
654 )
659
655
660
656
661 @contextlib.contextmanager
657 @contextlib.contextmanager
662 def temporary_ec2_instances(ec2resource, config):
658 def temporary_ec2_instances(ec2resource, config):
663 """Create temporary EC2 instances.
659 """Create temporary EC2 instances.
664
660
665 This is a proxy to ``ec2client.run_instances(**config)`` that takes care of
661 This is a proxy to ``ec2client.run_instances(**config)`` that takes care of
666 managing the lifecycle of the instances.
662 managing the lifecycle of the instances.
667
663
668 When the context manager exits, the instances are terminated.
664 When the context manager exits, the instances are terminated.
669
665
670 The context manager evaluates to the list of data structures
666 The context manager evaluates to the list of data structures
671 describing each created instance. The instances may not be available
667 describing each created instance. The instances may not be available
672 for work immediately: it is up to the caller to wait for the instance
668 for work immediately: it is up to the caller to wait for the instance
673 to start responding.
669 to start responding.
674 """
670 """
675
671
676 ids = None
672 ids = None
677
673
678 try:
674 try:
679 res = ec2resource.create_instances(**config)
675 res = ec2resource.create_instances(**config)
680
676
681 ids = [i.id for i in res]
677 ids = [i.id for i in res]
682 print('started instances: %s' % ' '.join(ids))
678 print('started instances: %s' % ' '.join(ids))
683
679
684 yield res
680 yield res
685 finally:
681 finally:
686 if ids:
682 if ids:
687 print('terminating instances: %s' % ' '.join(ids))
683 print('terminating instances: %s' % ' '.join(ids))
688 for instance in res:
684 for instance in res:
689 instance.terminate()
685 instance.terminate()
690 print('terminated %d instances' % len(ids))
686 print('terminated %d instances' % len(ids))
691
687
692
688
693 @contextlib.contextmanager
689 @contextlib.contextmanager
694 def create_temp_windows_ec2_instances(c: AWSConnection, config):
690 def create_temp_windows_ec2_instances(
691 c: AWSConnection, config, bootstrap: bool = False
692 ):
695 """Create temporary Windows EC2 instances.
693 """Create temporary Windows EC2 instances.
696
694
697 This is a higher-level wrapper around ``create_temp_ec2_instances()`` that
695 This is a higher-level wrapper around ``create_temp_ec2_instances()`` that
698 configures the Windows instance for Windows Remote Management. The emitted
696 configures the Windows instance for Windows Remote Management. The emitted
699 instances will have a ``winrm_client`` attribute containing a
697 instances will have a ``winrm_client`` attribute containing a
700 ``pypsrp.client.Client`` instance bound to the instance.
698 ``pypsrp.client.Client`` instance bound to the instance.
701 """
699 """
702 if 'IamInstanceProfile' in config:
700 if 'IamInstanceProfile' in config:
703 raise ValueError('IamInstanceProfile cannot be provided in config')
701 raise ValueError('IamInstanceProfile cannot be provided in config')
704 if 'UserData' in config:
702 if 'UserData' in config:
705 raise ValueError('UserData cannot be provided in config')
703 raise ValueError('UserData cannot be provided in config')
706
704
707 password = c.automation.default_password()
705 password = c.automation.default_password()
708
706
709 config = copy.deepcopy(config)
707 config = copy.deepcopy(config)
710 config['IamInstanceProfile'] = {
708 config['IamInstanceProfile'] = {
711 'Name': 'hg-ephemeral-ec2-1',
709 'Name': 'hg-ephemeral-ec2-1',
712 }
710 }
713 config.setdefault('TagSpecifications', []).append({
711 config.setdefault('TagSpecifications', []).append(
714 'ResourceType': 'instance',
712 {
715 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}],
713 'ResourceType': 'instance',
716 })
714 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}],
717 config['UserData'] = WINDOWS_USER_DATA % password
715 }
716 )
717
718 if bootstrap:
719 config['UserData'] = WINDOWS_USER_DATA % password
718
720
719 with temporary_ec2_instances(c.ec2resource, config) as instances:
721 with temporary_ec2_instances(c.ec2resource, config) as instances:
720 wait_for_ip_addresses(instances)
722 wait_for_ip_addresses(instances)
721
723
722 print('waiting for Windows Remote Management service...')
724 print('waiting for Windows Remote Management service...')
723
725
724 for instance in instances:
726 for instance in instances:
725 client = wait_for_winrm(instance.public_ip_address, 'Administrator', password)
727 client = wait_for_winrm(
728 instance.public_ip_address, 'Administrator', password
729 )
726 print('established WinRM connection to %s' % instance.id)
730 print('established WinRM connection to %s' % instance.id)
727 instance.winrm_client = client
731 instance.winrm_client = client
728
732
729 yield instances
733 yield instances
730
734
731
735
732 def resolve_fingerprint(fingerprint):
736 def resolve_fingerprint(fingerprint):
733 fingerprint = json.dumps(fingerprint, sort_keys=True)
737 fingerprint = json.dumps(fingerprint, sort_keys=True)
734 return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()
738 return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()
735
739
736
740
737 def find_and_reconcile_image(ec2resource, name, fingerprint):
741 def find_and_reconcile_image(ec2resource, name, fingerprint):
738 """Attempt to find an existing EC2 AMI with a name and fingerprint.
742 """Attempt to find an existing EC2 AMI with a name and fingerprint.
739
743
740 If an image with the specified fingerprint is found, it is returned.
744 If an image with the specified fingerprint is found, it is returned.
741 Otherwise None is returned.
745 Otherwise None is returned.
742
746
743 Existing images for the specified name that don't have the specified
747 Existing images for the specified name that don't have the specified
744 fingerprint or are missing required metadata or deleted.
748 fingerprint or are missing required metadata or deleted.
745 """
749 """
746 # Find existing AMIs with this name and delete the ones that are invalid.
750 # Find existing AMIs with this name and delete the ones that are invalid.
747 # Store a reference to a good image so it can be returned one the
751 # Store a reference to a good image so it can be returned one the
748 # image state is reconciled.
752 # image state is reconciled.
749 images = ec2resource.images.filter(
753 images = ec2resource.images.filter(
750 Filters=[{'Name': 'name', 'Values': [name]}])
754 Filters=[{'Name': 'name', 'Values': [name]}]
755 )
751
756
752 existing_image = None
757 existing_image = None
753
758
754 for image in images:
759 for image in images:
755 if image.tags is None:
760 if image.tags is None:
756 print('image %s for %s lacks required tags; removing' % (
761 print(
757 image.id, image.name))
762 'image %s for %s lacks required tags; removing'
763 % (image.id, image.name)
764 )
758 remove_ami(ec2resource, image)
765 remove_ami(ec2resource, image)
759 else:
766 else:
760 tags = {t['Key']: t['Value'] for t in image.tags}
767 tags = {t['Key']: t['Value'] for t in image.tags}
761
768
762 if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
769 if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
763 existing_image = image
770 existing_image = image
764 else:
771 else:
765 print('image %s for %s has wrong fingerprint; removing' % (
772 print(
766 image.id, image.name))
773 'image %s for %s has wrong fingerprint; removing'
774 % (image.id, image.name)
775 )
767 remove_ami(ec2resource, image)
776 remove_ami(ec2resource, image)
768
777
769 return existing_image
778 return existing_image
770
779
771
780
772 def create_ami_from_instance(ec2client, instance, name, description,
781 def create_ami_from_instance(
773 fingerprint):
782 ec2client, instance, name, description, fingerprint
783 ):
774 """Create an AMI from a running instance.
784 """Create an AMI from a running instance.
775
785
776 Returns the ``ec2resource.Image`` representing the created AMI.
786 Returns the ``ec2resource.Image`` representing the created AMI.
777 """
787 """
778 instance.stop()
788 instance.stop()
779
789
780 ec2client.get_waiter('instance_stopped').wait(
790 ec2client.get_waiter('instance_stopped').wait(
781 InstanceIds=[instance.id],
791 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
782 WaiterConfig={
792 )
783 'Delay': 5,
784 })
785 print('%s is stopped' % instance.id)
793 print('%s is stopped' % instance.id)
786
794
787 image = instance.create_image(
795 image = instance.create_image(Name=name, Description=description,)
788 Name=name,
789 Description=description,
790 )
791
796
792 image.create_tags(Tags=[
797 image.create_tags(
793 {
798 Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
794 'Key': 'HGIMAGEFINGERPRINT',
799 )
795 'Value': fingerprint,
796 },
797 ])
798
800
799 print('waiting for image %s' % image.id)
801 print('waiting for image %s' % image.id)
800
802
801 ec2client.get_waiter('image_available').wait(
803 ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
802 ImageIds=[image.id],
803 )
804
804
805 print('image %s available as %s' % (image.id, image.name))
805 print('image %s available as %s' % (image.id, image.name))
806
806
807 return image
807 return image
808
808
809
809
810 def ensure_linux_dev_ami(c: AWSConnection, distro='debian9', prefix='hg-'):
810 def ensure_linux_dev_ami(c: AWSConnection, distro='debian10', prefix='hg-'):
811 """Ensures a Linux development AMI is available and up-to-date.
811 """Ensures a Linux development AMI is available and up-to-date.
812
812
813 Returns an ``ec2.Image`` of either an existing AMI or a newly-built one.
813 Returns an ``ec2.Image`` of either an existing AMI or a newly-built one.
814 """
814 """
815 ec2client = c.ec2client
815 ec2client = c.ec2client
816 ec2resource = c.ec2resource
816 ec2resource = c.ec2resource
817
817
818 name = '%s%s-%s' % (prefix, 'linux-dev', distro)
818 name = '%s%s-%s' % (prefix, 'linux-dev', distro)
819
819
820 if distro == 'debian9':
820 if distro == 'debian9':
821 image = find_image(
821 image = find_image(
822 ec2resource,
822 ec2resource,
823 DEBIAN_ACCOUNT_ID,
823 DEBIAN_ACCOUNT_ID,
824 'debian-stretch-hvm-x86_64-gp2-2019-02-19-26620',
824 'debian-stretch-hvm-x86_64-gp2-2019-09-08-17994',
825 )
826 ssh_username = 'admin'
827 elif distro == 'debian10':
828 image = find_image(
829 ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
825 )
830 )
826 ssh_username = 'admin'
831 ssh_username = 'admin'
827 elif distro == 'ubuntu18.04':
832 elif distro == 'ubuntu18.04':
828 image = find_image(
833 image = find_image(
829 ec2resource,
834 ec2resource,
830 UBUNTU_ACCOUNT_ID,
835 UBUNTU_ACCOUNT_ID,
831 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190403',
836 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190918',
832 )
833 ssh_username = 'ubuntu'
834 elif distro == 'ubuntu18.10':
835 image = find_image(
836 ec2resource,
837 UBUNTU_ACCOUNT_ID,
838 'ubuntu/images/hvm-ssd/ubuntu-cosmic-18.10-amd64-server-20190402',
839 )
837 )
840 ssh_username = 'ubuntu'
838 ssh_username = 'ubuntu'
841 elif distro == 'ubuntu19.04':
839 elif distro == 'ubuntu19.04':
842 image = find_image(
840 image = find_image(
843 ec2resource,
841 ec2resource,
844 UBUNTU_ACCOUNT_ID,
842 UBUNTU_ACCOUNT_ID,
845 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190417',
843 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190918',
846 )
844 )
847 ssh_username = 'ubuntu'
845 ssh_username = 'ubuntu'
848 else:
846 else:
849 raise ValueError('unsupported Linux distro: %s' % distro)
847 raise ValueError('unsupported Linux distro: %s' % distro)
850
848
851 config = {
849 config = {
852 'BlockDeviceMappings': [
850 'BlockDeviceMappings': [
853 {
851 {
854 'DeviceName': image.block_device_mappings[0]['DeviceName'],
852 'DeviceName': image.block_device_mappings[0]['DeviceName'],
855 'Ebs': {
853 'Ebs': {
856 'DeleteOnTermination': True,
854 'DeleteOnTermination': True,
857 'VolumeSize': 8,
855 'VolumeSize': 10,
858 'VolumeType': 'gp2',
856 'VolumeType': 'gp2',
859 },
857 },
860 },
858 },
861 ],
859 ],
862 'EbsOptimized': True,
860 'EbsOptimized': True,
863 'ImageId': image.id,
861 'ImageId': image.id,
864 'InstanceInitiatedShutdownBehavior': 'stop',
862 'InstanceInitiatedShutdownBehavior': 'stop',
865 # 8 VCPUs for compiling Python.
863 # 8 VCPUs for compiling Python.
866 'InstanceType': 't3.2xlarge',
864 'InstanceType': 't3.2xlarge',
867 'KeyName': '%sautomation' % prefix,
865 'KeyName': '%sautomation' % prefix,
868 'MaxCount': 1,
866 'MaxCount': 1,
869 'MinCount': 1,
867 'MinCount': 1,
870 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
868 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
871 }
869 }
872
870
873 requirements2_path = (pathlib.Path(__file__).parent.parent /
871 requirements2_path = (
874 'linux-requirements-py2.txt')
872 pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt'
875 requirements3_path = (pathlib.Path(__file__).parent.parent /
873 )
876 'linux-requirements-py3.txt')
874 requirements3_path = (
875 pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt'
876 )
877 with requirements2_path.open('r', encoding='utf-8') as fh:
877 with requirements2_path.open('r', encoding='utf-8') as fh:
878 requirements2 = fh.read()
878 requirements2 = fh.read()
879 with requirements3_path.open('r', encoding='utf-8') as fh:
879 with requirements3_path.open('r', encoding='utf-8') as fh:
880 requirements3 = fh.read()
880 requirements3 = fh.read()
881
881
882 # Compute a deterministic fingerprint to determine whether image needs to
882 # Compute a deterministic fingerprint to determine whether image needs to
883 # be regenerated.
883 # be regenerated.
884 fingerprint = resolve_fingerprint({
884 fingerprint = resolve_fingerprint(
885 'instance_config': config,
885 {
886 'bootstrap_script': BOOTSTRAP_DEBIAN,
886 'instance_config': config,
887 'requirements_py2': requirements2,
887 'bootstrap_script': BOOTSTRAP_DEBIAN,
888 'requirements_py3': requirements3,
888 'requirements_py2': requirements2,
889 })
889 'requirements_py3': requirements3,
890 }
891 )
890
892
891 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
893 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
892
894
893 if existing_image:
895 if existing_image:
894 return existing_image
896 return existing_image
895
897
896 print('no suitable %s image found; creating one...' % name)
898 print('no suitable %s image found; creating one...' % name)
897
899
898 with temporary_ec2_instances(ec2resource, config) as instances:
900 with temporary_ec2_instances(ec2resource, config) as instances:
899 wait_for_ip_addresses(instances)
901 wait_for_ip_addresses(instances)
900
902
901 instance = instances[0]
903 instance = instances[0]
902
904
903 client = wait_for_ssh(
905 client = wait_for_ssh(
904 instance.public_ip_address, 22,
906 instance.public_ip_address,
907 22,
905 username=ssh_username,
908 username=ssh_username,
906 key_filename=str(c.key_pair_path_private('automation')))
909 key_filename=str(c.key_pair_path_private('automation')),
910 )
907
911
908 home = '/home/%s' % ssh_username
912 home = '/home/%s' % ssh_username
909
913
910 with client:
914 with client:
911 print('connecting to SSH server')
915 print('connecting to SSH server')
912 sftp = client.open_sftp()
916 sftp = client.open_sftp()
913
917
914 print('uploading bootstrap files')
918 print('uploading bootstrap files')
915 with sftp.open('%s/bootstrap' % home, 'wb') as fh:
919 with sftp.open('%s/bootstrap' % home, 'wb') as fh:
916 fh.write(BOOTSTRAP_DEBIAN)
920 fh.write(BOOTSTRAP_DEBIAN)
917 fh.chmod(0o0700)
921 fh.chmod(0o0700)
918
922
919 with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh:
923 with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh:
920 fh.write(requirements2)
924 fh.write(requirements2)
921 fh.chmod(0o0700)
925 fh.chmod(0o0700)
922
926
923 with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh:
927 with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh:
924 fh.write(requirements3)
928 fh.write(requirements3)
925 fh.chmod(0o0700)
929 fh.chmod(0o0700)
926
930
927 print('executing bootstrap')
931 print('executing bootstrap')
928 chan, stdin, stdout = ssh_exec_command(client,
932 chan, stdin, stdout = ssh_exec_command(
929 '%s/bootstrap' % home)
933 client, '%s/bootstrap' % home
934 )
930 stdin.close()
935 stdin.close()
931
936
932 for line in stdout:
937 for line in stdout:
933 print(line, end='')
938 print(line, end='')
934
939
935 res = chan.recv_exit_status()
940 res = chan.recv_exit_status()
936 if res:
941 if res:
937 raise Exception('non-0 exit from bootstrap: %d' % res)
942 raise Exception('non-0 exit from bootstrap: %d' % res)
938
943
939 print('bootstrap completed; stopping %s to create %s' % (
944 print(
940 instance.id, name))
945 'bootstrap completed; stopping %s to create %s'
946 % (instance.id, name)
947 )
941
948
942 return create_ami_from_instance(ec2client, instance, name,
949 return create_ami_from_instance(
943 'Mercurial Linux development environment',
950 ec2client,
944 fingerprint)
951 instance,
952 name,
953 'Mercurial Linux development environment',
954 fingerprint,
955 )
945
956
946
957
947 @contextlib.contextmanager
958 @contextlib.contextmanager
948 def temporary_linux_dev_instances(c: AWSConnection, image, instance_type,
959 def temporary_linux_dev_instances(
949 prefix='hg-', ensure_extra_volume=False):
960 c: AWSConnection,
961 image,
962 instance_type,
963 prefix='hg-',
964 ensure_extra_volume=False,
965 ):
950 """Create temporary Linux development EC2 instances.
966 """Create temporary Linux development EC2 instances.
951
967
952 Context manager resolves to a list of ``ec2.Instance`` that were created
968 Context manager resolves to a list of ``ec2.Instance`` that were created
953 and are running.
969 and are running.
954
970
955 ``ensure_extra_volume`` can be set to ``True`` to require that instances
971 ``ensure_extra_volume`` can be set to ``True`` to require that instances
956 have a 2nd storage volume available other than the primary AMI volume.
972 have a 2nd storage volume available other than the primary AMI volume.
957 For instance types with instance storage, this does nothing special.
973 For instance types with instance storage, this does nothing special.
958 But for instance types without instance storage, an additional EBS volume
974 But for instance types without instance storage, an additional EBS volume
959 will be added to the instance.
975 will be added to the instance.
960
976
961 Instances have an ``ssh_client`` attribute containing a paramiko SSHClient
977 Instances have an ``ssh_client`` attribute containing a paramiko SSHClient
962 instance bound to the instance.
978 instance bound to the instance.
963
979
964 Instances have an ``ssh_private_key_path`` attributing containing the
980 Instances have an ``ssh_private_key_path`` attributing containing the
965 str path to the SSH private key to connect to the instance.
981 str path to the SSH private key to connect to the instance.
966 """
982 """
967
983
968 block_device_mappings = [
984 block_device_mappings = [
969 {
985 {
970 'DeviceName': image.block_device_mappings[0]['DeviceName'],
986 'DeviceName': image.block_device_mappings[0]['DeviceName'],
971 'Ebs': {
987 'Ebs': {
972 'DeleteOnTermination': True,
988 'DeleteOnTermination': True,
973 'VolumeSize': 8,
989 'VolumeSize': 12,
974 'VolumeType': 'gp2',
990 'VolumeType': 'gp2',
975 },
991 },
976 }
992 }
977 ]
993 ]
978
994
979 # This is not an exhaustive list of instance types having instance storage.
995 # This is not an exhaustive list of instance types having instance storage.
980 # But
996 # But
981 if (ensure_extra_volume
997 if ensure_extra_volume and not instance_type.startswith(
982 and not instance_type.startswith(tuple(INSTANCE_TYPES_WITH_STORAGE))):
998 tuple(INSTANCE_TYPES_WITH_STORAGE)
999 ):
983 main_device = block_device_mappings[0]['DeviceName']
1000 main_device = block_device_mappings[0]['DeviceName']
984
1001
985 if main_device == 'xvda':
1002 if main_device == 'xvda':
986 second_device = 'xvdb'
1003 second_device = 'xvdb'
987 elif main_device == '/dev/sda1':
1004 elif main_device == '/dev/sda1':
988 second_device = '/dev/sdb'
1005 second_device = '/dev/sdb'
989 else:
1006 else:
990 raise ValueError('unhandled primary EBS device name: %s' %
1007 raise ValueError(
991 main_device)
1008 'unhandled primary EBS device name: %s' % main_device
1009 )
992
1010
993 block_device_mappings.append({
1011 block_device_mappings.append(
994 'DeviceName': second_device,
1012 {
995 'Ebs': {
1013 'DeviceName': second_device,
996 'DeleteOnTermination': True,
1014 'Ebs': {
997 'VolumeSize': 8,
1015 'DeleteOnTermination': True,
998 'VolumeType': 'gp2',
1016 'VolumeSize': 8,
1017 'VolumeType': 'gp2',
1018 },
999 }
1019 }
1000 })
1020 )
1001
1021
1002 config = {
1022 config = {
1003 'BlockDeviceMappings': block_device_mappings,
1023 'BlockDeviceMappings': block_device_mappings,
1004 'EbsOptimized': True,
1024 'EbsOptimized': True,
1005 'ImageId': image.id,
1025 'ImageId': image.id,
1006 'InstanceInitiatedShutdownBehavior': 'terminate',
1026 'InstanceInitiatedShutdownBehavior': 'terminate',
1007 'InstanceType': instance_type,
1027 'InstanceType': instance_type,
1008 'KeyName': '%sautomation' % prefix,
1028 'KeyName': '%sautomation' % prefix,
1009 'MaxCount': 1,
1029 'MaxCount': 1,
1010 'MinCount': 1,
1030 'MinCount': 1,
1011 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
1031 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
1012 }
1032 }
1013
1033
1014 with temporary_ec2_instances(c.ec2resource, config) as instances:
1034 with temporary_ec2_instances(c.ec2resource, config) as instances:
1015 wait_for_ip_addresses(instances)
1035 wait_for_ip_addresses(instances)
1016
1036
1017 ssh_private_key_path = str(c.key_pair_path_private('automation'))
1037 ssh_private_key_path = str(c.key_pair_path_private('automation'))
1018
1038
1019 for instance in instances:
1039 for instance in instances:
1020 client = wait_for_ssh(
1040 client = wait_for_ssh(
1021 instance.public_ip_address, 22,
1041 instance.public_ip_address,
1042 22,
1022 username='hg',
1043 username='hg',
1023 key_filename=ssh_private_key_path)
1044 key_filename=ssh_private_key_path,
1045 )
1024
1046
1025 instance.ssh_client = client
1047 instance.ssh_client = client
1026 instance.ssh_private_key_path = ssh_private_key_path
1048 instance.ssh_private_key_path = ssh_private_key_path
1027
1049
1028 try:
1050 try:
1029 yield instances
1051 yield instances
1030 finally:
1052 finally:
1031 for instance in instances:
1053 for instance in instances:
1032 instance.ssh_client.close()
1054 instance.ssh_client.close()
1033
1055
1034
1056
1035 def ensure_windows_dev_ami(c: AWSConnection, prefix='hg-',
1057 def ensure_windows_dev_ami(
1036 base_image_name=WINDOWS_BASE_IMAGE_NAME):
1058 c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME
1059 ):
1037 """Ensure Windows Development AMI is available and up-to-date.
1060 """Ensure Windows Development AMI is available and up-to-date.
1038
1061
1039 If necessary, a modern AMI will be built by starting a temporary EC2
1062 If necessary, a modern AMI will be built by starting a temporary EC2
1040 instance and bootstrapping it.
1063 instance and bootstrapping it.
1041
1064
1042 Obsolete AMIs will be deleted so there is only a single AMI having the
1065 Obsolete AMIs will be deleted so there is only a single AMI having the
1043 desired name.
1066 desired name.
1044
1067
1045 Returns an ``ec2.Image`` of either an existing AMI or a newly-built
1068 Returns an ``ec2.Image`` of either an existing AMI or a newly-built
1046 one.
1069 one.
1047 """
1070 """
1048 ec2client = c.ec2client
1071 ec2client = c.ec2client
1049 ec2resource = c.ec2resource
1072 ec2resource = c.ec2resource
1050 ssmclient = c.session.client('ssm')
1073 ssmclient = c.session.client('ssm')
1051
1074
1052 name = '%s%s' % (prefix, 'windows-dev')
1075 name = '%s%s' % (prefix, 'windows-dev')
1053
1076
1054 image = find_image(ec2resource, AMAZON_ACCOUNT_ID, base_image_name)
1077 image = find_image(ec2resource, AMAZON_ACCOUNT_ID, base_image_name)
1055
1078
1056 config = {
1079 config = {
1057 'BlockDeviceMappings': [
1080 'BlockDeviceMappings': [
1058 {
1081 {
1059 'DeviceName': '/dev/sda1',
1082 'DeviceName': '/dev/sda1',
1060 'Ebs': {
1083 'Ebs': {
1061 'DeleteOnTermination': True,
1084 'DeleteOnTermination': True,
1062 'VolumeSize': 32,
1085 'VolumeSize': 32,
1063 'VolumeType': 'gp2',
1086 'VolumeType': 'gp2',
1064 },
1087 },
1065 }
1088 }
1066 ],
1089 ],
1067 'ImageId': image.id,
1090 'ImageId': image.id,
1068 'InstanceInitiatedShutdownBehavior': 'stop',
1091 'InstanceInitiatedShutdownBehavior': 'stop',
1069 'InstanceType': 't3.medium',
1092 'InstanceType': 't3.medium',
1070 'KeyName': '%sautomation' % prefix,
1093 'KeyName': '%sautomation' % prefix,
1071 'MaxCount': 1,
1094 'MaxCount': 1,
1072 'MinCount': 1,
1095 'MinCount': 1,
1073 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id],
1096 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id],
1074 }
1097 }
1075
1098
1076 commands = [
1099 commands = [
1077 # Need to start the service so sshd_config is generated.
1100 # Need to start the service so sshd_config is generated.
1078 'Start-Service sshd',
1101 'Start-Service sshd',
1079 'Write-Output "modifying sshd_config"',
1102 'Write-Output "modifying sshd_config"',
1080 r'$content = Get-Content C:\ProgramData\ssh\sshd_config',
1103 r'$content = Get-Content C:\ProgramData\ssh\sshd_config',
1081 '$content = $content -replace "Match Group administrators","" -replace "AuthorizedKeysFile __PROGRAMDATA__/ssh/administrators_authorized_keys",""',
1104 '$content = $content -replace "Match Group administrators","" -replace "AuthorizedKeysFile __PROGRAMDATA__/ssh/administrators_authorized_keys",""',
1082 r'$content | Set-Content C:\ProgramData\ssh\sshd_config',
1105 r'$content | Set-Content C:\ProgramData\ssh\sshd_config',
1083 'Import-Module OpenSSHUtils',
1106 'Import-Module OpenSSHUtils',
1084 r'Repair-SshdConfigPermission C:\ProgramData\ssh\sshd_config -Confirm:$false',
1107 r'Repair-SshdConfigPermission C:\ProgramData\ssh\sshd_config -Confirm:$false',
1085 'Restart-Service sshd',
1108 'Restart-Service sshd',
1086 'Write-Output "installing OpenSSL client"',
1109 'Write-Output "installing OpenSSL client"',
1087 'Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0',
1110 'Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0',
1088 'Set-Service -Name sshd -StartupType "Automatic"',
1111 'Set-Service -Name sshd -StartupType "Automatic"',
1089 'Write-Output "OpenSSH server running"',
1112 'Write-Output "OpenSSH server running"',
1090 ]
1113 ]
1091
1114
1092 with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh:
1115 with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh:
1093 commands.extend(l.rstrip() for l in fh)
1116 commands.extend(l.rstrip() for l in fh)
1094
1117
1118 # Schedule run of EC2Launch on next boot. This ensures that UserData
1119 # is executed.
1120 # We disable setComputerName because it forces a reboot.
1121 # We set an explicit admin password because this causes UserData to run
1122 # as Administrator instead of System.
1123 commands.extend(
1124 [
1125 r'''Set-Content -Path C:\ProgramData\Amazon\EC2-Windows\Launch\Config\LaunchConfig.json '''
1126 r'''-Value '{"setComputerName": false, "setWallpaper": true, "addDnsSuffixList": true, '''
1127 r'''"extendBootVolumeSize": true, "handleUserData": true, '''
1128 r'''"adminPasswordType": "Specify", "adminPassword": "%s"}' '''
1129 % c.automation.default_password(),
1130 r'C:\ProgramData\Amazon\EC2-Windows\Launch\Scripts\InitializeInstance.ps1 '
1131 r'–Schedule',
1132 ]
1133 )
1134
1095 # Disable Windows Defender when bootstrapping because it just slows
1135 # Disable Windows Defender when bootstrapping because it just slows
1096 # things down.
1136 # things down.
1097 commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true')
1137 commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true')
1098 commands.append('Set-MpPreference -DisableRealtimeMonitoring $false')
1138 commands.append('Set-MpPreference -DisableRealtimeMonitoring $false')
1099
1139
1100 # Compute a deterministic fingerprint to determine whether image needs
1140 # Compute a deterministic fingerprint to determine whether image needs
1101 # to be regenerated.
1141 # to be regenerated.
1102 fingerprint = resolve_fingerprint({
1142 fingerprint = resolve_fingerprint(
1103 'instance_config': config,
1143 {
1104 'user_data': WINDOWS_USER_DATA,
1144 'instance_config': config,
1105 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
1145 'user_data': WINDOWS_USER_DATA,
1106 'bootstrap_commands': commands,
1146 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
1107 'base_image_name': base_image_name,
1147 'bootstrap_commands': commands,
1108 })
1148 'base_image_name': base_image_name,
1149 }
1150 )
1109
1151
1110 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
1152 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
1111
1153
1112 if existing_image:
1154 if existing_image:
1113 return existing_image
1155 return existing_image
1114
1156
1115 print('no suitable Windows development image found; creating one...')
1157 print('no suitable Windows development image found; creating one...')
1116
1158
1117 with create_temp_windows_ec2_instances(c, config) as instances:
1159 with create_temp_windows_ec2_instances(
1160 c, config, bootstrap=True
1161 ) as instances:
1118 assert len(instances) == 1
1162 assert len(instances) == 1
1119 instance = instances[0]
1163 instance = instances[0]
1120
1164
1121 wait_for_ssm(ssmclient, [instance])
1165 wait_for_ssm(ssmclient, [instance])
1122
1166
1123 # On first boot, install various Windows updates.
1167 # On first boot, install various Windows updates.
1124 # We would ideally use PowerShell Remoting for this. However, there are
1168 # We would ideally use PowerShell Remoting for this. However, there are
1125 # trust issues that make it difficult to invoke Windows Update
1169 # trust issues that make it difficult to invoke Windows Update
1126 # remotely. So we use SSM, which has a mechanism for running Windows
1170 # remotely. So we use SSM, which has a mechanism for running Windows
1127 # Update.
1171 # Update.
1128 print('installing Windows features...')
1172 print('installing Windows features...')
1129 run_ssm_command(
1173 run_ssm_command(
1130 ssmclient,
1174 ssmclient,
1131 [instance],
1175 [instance],
1132 'AWS-RunPowerShellScript',
1176 'AWS-RunPowerShellScript',
1133 {
1177 {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
1134 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
1135 },
1136 )
1178 )
1137
1179
1138 # Reboot so all updates are fully applied.
1180 # Reboot so all updates are fully applied.
1139 #
1181 #
1140 # We don't use instance.reboot() here because it is asynchronous and
1182 # We don't use instance.reboot() here because it is asynchronous and
1141 # we don't know when exactly the instance has rebooted. It could take
1183 # we don't know when exactly the instance has rebooted. It could take
1142 # a while to stop and we may start trying to interact with the instance
1184 # a while to stop and we may start trying to interact with the instance
1143 # before it has rebooted.
1185 # before it has rebooted.
1144 print('rebooting instance %s' % instance.id)
1186 print('rebooting instance %s' % instance.id)
1145 instance.stop()
1187 instance.stop()
1146 ec2client.get_waiter('instance_stopped').wait(
1188 ec2client.get_waiter('instance_stopped').wait(
1147 InstanceIds=[instance.id],
1189 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
1148 WaiterConfig={
1190 )
1149 'Delay': 5,
1150 })
1151
1191
1152 instance.start()
1192 instance.start()
1153 wait_for_ip_addresses([instance])
1193 wait_for_ip_addresses([instance])
1154
1194
1155 # There is a race condition here between the User Data PS script running
1195 # There is a race condition here between the User Data PS script running
1156 # and us connecting to WinRM. This can manifest as
1196 # and us connecting to WinRM. This can manifest as
1157 # "AuthorizationManager check failed" failures during run_powershell().
1197 # "AuthorizationManager check failed" failures during run_powershell().
1158 # TODO figure out a workaround.
1198 # TODO figure out a workaround.
1159
1199
1160 print('waiting for Windows Remote Management to come back...')
1200 print('waiting for Windows Remote Management to come back...')
1161 client = wait_for_winrm(instance.public_ip_address, 'Administrator',
1201 client = wait_for_winrm(
1162 c.automation.default_password())
1202 instance.public_ip_address,
1203 'Administrator',
1204 c.automation.default_password(),
1205 )
1163 print('established WinRM connection to %s' % instance.id)
1206 print('established WinRM connection to %s' % instance.id)
1164 instance.winrm_client = client
1207 instance.winrm_client = client
1165
1208
1166 print('bootstrapping instance...')
1209 print('bootstrapping instance...')
1167 run_powershell(instance.winrm_client, '\n'.join(commands))
1210 run_powershell(instance.winrm_client, '\n'.join(commands))
1168
1211
1169 print('bootstrap completed; stopping %s to create image' % instance.id)
1212 print('bootstrap completed; stopping %s to create image' % instance.id)
1170 return create_ami_from_instance(ec2client, instance, name,
1213 return create_ami_from_instance(
1171 'Mercurial Windows development environment',
1214 ec2client,
1172 fingerprint)
1215 instance,
1216 name,
1217 'Mercurial Windows development environment',
1218 fingerprint,
1219 )
1173
1220
1174
1221
1175 @contextlib.contextmanager
1222 @contextlib.contextmanager
1176 def temporary_windows_dev_instances(c: AWSConnection, image, instance_type,
1223 def temporary_windows_dev_instances(
1177 prefix='hg-', disable_antivirus=False):
1224 c: AWSConnection,
1225 image,
1226 instance_type,
1227 prefix='hg-',
1228 disable_antivirus=False,
1229 ):
1178 """Create a temporary Windows development EC2 instance.
1230 """Create a temporary Windows development EC2 instance.
1179
1231
1180 Context manager resolves to the list of ``EC2.Instance`` that were created.
1232 Context manager resolves to the list of ``EC2.Instance`` that were created.
1181 """
1233 """
1182 config = {
1234 config = {
1183 'BlockDeviceMappings': [
1235 'BlockDeviceMappings': [
1184 {
1236 {
1185 'DeviceName': '/dev/sda1',
1237 'DeviceName': '/dev/sda1',
1186 'Ebs': {
1238 'Ebs': {
1187 'DeleteOnTermination': True,
1239 'DeleteOnTermination': True,
1188 'VolumeSize': 32,
1240 'VolumeSize': 32,
1189 'VolumeType': 'gp2',
1241 'VolumeType': 'gp2',
1190 },
1242 },
1191 }
1243 }
1192 ],
1244 ],
1193 'ImageId': image.id,
1245 'ImageId': image.id,
1194 'InstanceInitiatedShutdownBehavior': 'stop',
1246 'InstanceInitiatedShutdownBehavior': 'stop',
1195 'InstanceType': instance_type,
1247 'InstanceType': instance_type,
1196 'KeyName': '%sautomation' % prefix,
1248 'KeyName': '%sautomation' % prefix,
1197 'MaxCount': 1,
1249 'MaxCount': 1,
1198 'MinCount': 1,
1250 'MinCount': 1,
1199 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id],
1251 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id],
1200 }
1252 }
1201
1253
1202 with create_temp_windows_ec2_instances(c, config) as instances:
1254 with create_temp_windows_ec2_instances(c, config) as instances:
1203 if disable_antivirus:
1255 if disable_antivirus:
1204 for instance in instances:
1256 for instance in instances:
1205 run_powershell(
1257 run_powershell(
1206 instance.winrm_client,
1258 instance.winrm_client,
1207 'Set-MpPreference -DisableRealtimeMonitoring $true')
1259 'Set-MpPreference -DisableRealtimeMonitoring $true',
1260 )
1208
1261
1209 yield instances
1262 yield instances
@@ -1,424 +1,487 b''
1 # cli.py - Command line interface for automation
1 # cli.py - Command line interface for automation
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import argparse
10 import argparse
11 import concurrent.futures as futures
11 import concurrent.futures as futures
12 import os
12 import os
13 import pathlib
13 import pathlib
14 import time
14 import time
15
15
16 from . import (
16 from . import (
17 aws,
17 aws,
18 HGAutomation,
18 HGAutomation,
19 linux,
19 linux,
20 try_server,
20 windows,
21 windows,
21 )
22 )
22
23
23
24
24 SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent
25 SOURCE_ROOT = pathlib.Path(
26 os.path.abspath(__file__)
27 ).parent.parent.parent.parent
25 DIST_PATH = SOURCE_ROOT / 'dist'
28 DIST_PATH = SOURCE_ROOT / 'dist'
26
29
27
30
28 def bootstrap_linux_dev(hga: HGAutomation, aws_region, distros=None,
31 def bootstrap_linux_dev(
29 parallel=False):
32 hga: HGAutomation, aws_region, distros=None, parallel=False
33 ):
30 c = hga.aws_connection(aws_region)
34 c = hga.aws_connection(aws_region)
31
35
32 if distros:
36 if distros:
33 distros = distros.split(',')
37 distros = distros.split(',')
34 else:
38 else:
35 distros = sorted(linux.DISTROS)
39 distros = sorted(linux.DISTROS)
36
40
37 # TODO There is a wonky interaction involving KeyboardInterrupt whereby
41 # TODO There is a wonky interaction involving KeyboardInterrupt whereby
38 # the context manager that is supposed to terminate the temporary EC2
42 # the context manager that is supposed to terminate the temporary EC2
39 # instance doesn't run. Until we fix this, make parallel building opt-in
43 # instance doesn't run. Until we fix this, make parallel building opt-in
40 # so we don't orphan instances.
44 # so we don't orphan instances.
41 if parallel:
45 if parallel:
42 fs = []
46 fs = []
43
47
44 with futures.ThreadPoolExecutor(len(distros)) as e:
48 with futures.ThreadPoolExecutor(len(distros)) as e:
45 for distro in distros:
49 for distro in distros:
46 fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
50 fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
47
51
48 for f in fs:
52 for f in fs:
49 f.result()
53 f.result()
50 else:
54 else:
51 for distro in distros:
55 for distro in distros:
52 aws.ensure_linux_dev_ami(c, distro=distro)
56 aws.ensure_linux_dev_ami(c, distro=distro)
53
57
54
58
55 def bootstrap_windows_dev(hga: HGAutomation, aws_region, base_image_name):
59 def bootstrap_windows_dev(hga: HGAutomation, aws_region, base_image_name):
56 c = hga.aws_connection(aws_region)
60 c = hga.aws_connection(aws_region)
57 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
61 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
58 print('Windows development AMI available as %s' % image.id)
62 print('Windows development AMI available as %s' % image.id)
59
63
60
64
61 def build_inno(hga: HGAutomation, aws_region, arch, revision, version,
65 def build_inno(
62 base_image_name):
66 hga: HGAutomation, aws_region, arch, revision, version, base_image_name
67 ):
63 c = hga.aws_connection(aws_region)
68 c = hga.aws_connection(aws_region)
64 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
69 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
65 DIST_PATH.mkdir(exist_ok=True)
70 DIST_PATH.mkdir(exist_ok=True)
66
71
67 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
72 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
68 instance = insts[0]
73 instance = insts[0]
69
74
70 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
75 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
71
76
72 for a in arch:
77 for a in arch:
73 windows.build_inno_installer(instance.winrm_client, a,
78 windows.build_inno_installer(
74 DIST_PATH,
79 instance.winrm_client, a, DIST_PATH, version=version
75 version=version)
80 )
76
81
77
82
78 def build_wix(hga: HGAutomation, aws_region, arch, revision, version,
83 def build_wix(
79 base_image_name):
84 hga: HGAutomation, aws_region, arch, revision, version, base_image_name
85 ):
80 c = hga.aws_connection(aws_region)
86 c = hga.aws_connection(aws_region)
81 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
87 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
82 DIST_PATH.mkdir(exist_ok=True)
88 DIST_PATH.mkdir(exist_ok=True)
83
89
84 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
90 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
85 instance = insts[0]
91 instance = insts[0]
86
92
87 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
93 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
88
94
89 for a in arch:
95 for a in arch:
90 windows.build_wix_installer(instance.winrm_client, a,
96 windows.build_wix_installer(
91 DIST_PATH, version=version)
97 instance.winrm_client, a, DIST_PATH, version=version
98 )
92
99
93
100
94 def build_windows_wheel(hga: HGAutomation, aws_region, arch, revision,
101 def build_windows_wheel(
95 base_image_name):
102 hga: HGAutomation, aws_region, arch, revision, base_image_name
103 ):
96 c = hga.aws_connection(aws_region)
104 c = hga.aws_connection(aws_region)
97 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
105 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
98 DIST_PATH.mkdir(exist_ok=True)
106 DIST_PATH.mkdir(exist_ok=True)
99
107
100 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
108 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
101 instance = insts[0]
109 instance = insts[0]
102
110
103 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
111 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
104
112
105 for a in arch:
113 for a in arch:
106 windows.build_wheel(instance.winrm_client, a, DIST_PATH)
114 windows.build_wheel(instance.winrm_client, a, DIST_PATH)
107
115
108
116
109 def build_all_windows_packages(hga: HGAutomation, aws_region, revision,
117 def build_all_windows_packages(
110 version, base_image_name):
118 hga: HGAutomation, aws_region, revision, version, base_image_name
119 ):
111 c = hga.aws_connection(aws_region)
120 c = hga.aws_connection(aws_region)
112 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
121 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
113 DIST_PATH.mkdir(exist_ok=True)
122 DIST_PATH.mkdir(exist_ok=True)
114
123
115 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
124 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
116 instance = insts[0]
125 instance = insts[0]
117
126
118 winrm_client = instance.winrm_client
127 winrm_client = instance.winrm_client
119
128
120 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
129 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
121
130
122 for arch in ('x86', 'x64'):
131 for arch in ('x86', 'x64'):
123 windows.purge_hg(winrm_client)
132 windows.purge_hg(winrm_client)
124 windows.build_wheel(winrm_client, arch, DIST_PATH)
133 windows.build_wheel(winrm_client, arch, DIST_PATH)
125 windows.purge_hg(winrm_client)
134 windows.purge_hg(winrm_client)
126 windows.build_inno_installer(winrm_client, arch, DIST_PATH,
135 windows.build_inno_installer(
127 version=version)
136 winrm_client, arch, DIST_PATH, version=version
137 )
128 windows.purge_hg(winrm_client)
138 windows.purge_hg(winrm_client)
129 windows.build_wix_installer(winrm_client, arch, DIST_PATH,
139 windows.build_wix_installer(
130 version=version)
140 winrm_client, arch, DIST_PATH, version=version
141 )
131
142
132
143
133 def terminate_ec2_instances(hga: HGAutomation, aws_region):
144 def terminate_ec2_instances(hga: HGAutomation, aws_region):
134 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
145 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
135 aws.terminate_ec2_instances(c.ec2resource)
146 aws.terminate_ec2_instances(c.ec2resource)
136
147
137
148
138 def purge_ec2_resources(hga: HGAutomation, aws_region):
149 def purge_ec2_resources(hga: HGAutomation, aws_region):
139 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
150 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
140 aws.remove_resources(c)
151 aws.remove_resources(c)
141
152
142
153
143 def run_tests_linux(hga: HGAutomation, aws_region, instance_type,
154 def run_tests_linux(
144 python_version, test_flags, distro, filesystem):
155 hga: HGAutomation,
156 aws_region,
157 instance_type,
158 python_version,
159 test_flags,
160 distro,
161 filesystem,
162 ):
145 c = hga.aws_connection(aws_region)
163 c = hga.aws_connection(aws_region)
146 image = aws.ensure_linux_dev_ami(c, distro=distro)
164 image = aws.ensure_linux_dev_ami(c, distro=distro)
147
165
148 t_start = time.time()
166 t_start = time.time()
149
167
150 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
168 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
151
169
152 with aws.temporary_linux_dev_instances(
170 with aws.temporary_linux_dev_instances(
153 c, image, instance_type,
171 c, image, instance_type, ensure_extra_volume=ensure_extra_volume
154 ensure_extra_volume=ensure_extra_volume) as insts:
172 ) as insts:
155
173
156 instance = insts[0]
174 instance = insts[0]
157
175
158 linux.prepare_exec_environment(instance.ssh_client,
176 linux.prepare_exec_environment(
159 filesystem=filesystem)
177 instance.ssh_client, filesystem=filesystem
178 )
160 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
179 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
161 t_prepared = time.time()
180 t_prepared = time.time()
162 linux.run_tests(instance.ssh_client, python_version,
181 linux.run_tests(instance.ssh_client, python_version, test_flags)
163 test_flags)
164 t_done = time.time()
182 t_done = time.time()
165
183
166 t_setup = t_prepared - t_start
184 t_setup = t_prepared - t_start
167 t_all = t_done - t_start
185 t_all = t_done - t_start
168
186
169 print(
187 print(
170 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
188 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
171 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0))
189 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
190 )
172
191
173
192
174 def run_tests_windows(hga: HGAutomation, aws_region, instance_type,
193 def run_tests_windows(
175 python_version, arch, test_flags, base_image_name):
194 hga: HGAutomation,
195 aws_region,
196 instance_type,
197 python_version,
198 arch,
199 test_flags,
200 base_image_name,
201 ):
176 c = hga.aws_connection(aws_region)
202 c = hga.aws_connection(aws_region)
177 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
203 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
178
204
179 with aws.temporary_windows_dev_instances(c, image, instance_type,
205 with aws.temporary_windows_dev_instances(
180 disable_antivirus=True) as insts:
206 c, image, instance_type, disable_antivirus=True
207 ) as insts:
181 instance = insts[0]
208 instance = insts[0]
182
209
183 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
210 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
184 windows.run_tests(instance.winrm_client, python_version, arch,
211 windows.run_tests(
185 test_flags)
212 instance.winrm_client, python_version, arch, test_flags
213 )
214
215
216 def publish_windows_artifacts(
217 hg: HGAutomation,
218 aws_region,
219 version: str,
220 pypi: bool,
221 mercurial_scm_org: bool,
222 ssh_username: str,
223 ):
224 windows.publish_artifacts(
225 DIST_PATH,
226 version,
227 pypi=pypi,
228 mercurial_scm_org=mercurial_scm_org,
229 ssh_username=ssh_username,
230 )
231
232
233 def run_try(hga: HGAutomation, aws_region: str, rev: str):
234 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
235 try_server.trigger_try(c, rev=rev)
186
236
187
237
188 def get_parser():
238 def get_parser():
189 parser = argparse.ArgumentParser()
239 parser = argparse.ArgumentParser()
190
240
191 parser.add_argument(
241 parser.add_argument(
192 '--state-path',
242 '--state-path',
193 default='~/.hgautomation',
243 default='~/.hgautomation',
194 help='Path for local state files',
244 help='Path for local state files',
195 )
245 )
196 parser.add_argument(
246 parser.add_argument(
197 '--aws-region',
247 '--aws-region', help='AWS region to use', default='us-west-2',
198 help='AWS region to use',
199 default='us-west-1',
200 )
248 )
201
249
202 subparsers = parser.add_subparsers()
250 subparsers = parser.add_subparsers()
203
251
204 sp = subparsers.add_parser(
252 sp = subparsers.add_parser(
205 'bootstrap-linux-dev',
253 'bootstrap-linux-dev', help='Bootstrap Linux development environments',
206 help='Bootstrap Linux development environments',
207 )
254 )
208 sp.add_argument(
255 sp.add_argument(
209 '--distros',
256 '--distros', help='Comma delimited list of distros to bootstrap',
210 help='Comma delimited list of distros to bootstrap',
211 )
257 )
212 sp.add_argument(
258 sp.add_argument(
213 '--parallel',
259 '--parallel',
214 action='store_true',
260 action='store_true',
215 help='Generate AMIs in parallel (not CTRL-c safe)'
261 help='Generate AMIs in parallel (not CTRL-c safe)',
216 )
262 )
217 sp.set_defaults(func=bootstrap_linux_dev)
263 sp.set_defaults(func=bootstrap_linux_dev)
218
264
219 sp = subparsers.add_parser(
265 sp = subparsers.add_parser(
220 'bootstrap-windows-dev',
266 'bootstrap-windows-dev',
221 help='Bootstrap the Windows development environment',
267 help='Bootstrap the Windows development environment',
222 )
268 )
223 sp.add_argument(
269 sp.add_argument(
224 '--base-image-name',
270 '--base-image-name',
225 help='AMI name of base image',
271 help='AMI name of base image',
226 default=aws.WINDOWS_BASE_IMAGE_NAME,
272 default=aws.WINDOWS_BASE_IMAGE_NAME,
227 )
273 )
228 sp.set_defaults(func=bootstrap_windows_dev)
274 sp.set_defaults(func=bootstrap_windows_dev)
229
275
230 sp = subparsers.add_parser(
276 sp = subparsers.add_parser(
231 'build-all-windows-packages',
277 'build-all-windows-packages', help='Build all Windows packages',
232 help='Build all Windows packages',
233 )
278 )
234 sp.add_argument(
279 sp.add_argument(
235 '--revision',
280 '--revision', help='Mercurial revision to build', default='.',
236 help='Mercurial revision to build',
237 default='.',
238 )
281 )
239 sp.add_argument(
282 sp.add_argument(
240 '--version',
283 '--version', help='Mercurial version string to use',
241 help='Mercurial version string to use',
242 )
284 )
243 sp.add_argument(
285 sp.add_argument(
244 '--base-image-name',
286 '--base-image-name',
245 help='AMI name of base image',
287 help='AMI name of base image',
246 default=aws.WINDOWS_BASE_IMAGE_NAME,
288 default=aws.WINDOWS_BASE_IMAGE_NAME,
247 )
289 )
248 sp.set_defaults(func=build_all_windows_packages)
290 sp.set_defaults(func=build_all_windows_packages)
249
291
250 sp = subparsers.add_parser(
292 sp = subparsers.add_parser(
251 'build-inno',
293 'build-inno', help='Build Inno Setup installer(s)',
252 help='Build Inno Setup installer(s)',
253 )
294 )
254 sp.add_argument(
295 sp.add_argument(
255 '--arch',
296 '--arch',
256 help='Architecture to build for',
297 help='Architecture to build for',
257 choices={'x86', 'x64'},
298 choices={'x86', 'x64'},
258 nargs='*',
299 nargs='*',
259 default=['x64'],
300 default=['x64'],
260 )
301 )
261 sp.add_argument(
302 sp.add_argument(
262 '--revision',
303 '--revision', help='Mercurial revision to build', default='.',
263 help='Mercurial revision to build',
264 default='.',
265 )
304 )
266 sp.add_argument(
305 sp.add_argument(
267 '--version',
306 '--version', help='Mercurial version string to use in installer',
268 help='Mercurial version string to use in installer',
269 )
307 )
270 sp.add_argument(
308 sp.add_argument(
271 '--base-image-name',
309 '--base-image-name',
272 help='AMI name of base image',
310 help='AMI name of base image',
273 default=aws.WINDOWS_BASE_IMAGE_NAME,
311 default=aws.WINDOWS_BASE_IMAGE_NAME,
274 )
312 )
275 sp.set_defaults(func=build_inno)
313 sp.set_defaults(func=build_inno)
276
314
277 sp = subparsers.add_parser(
315 sp = subparsers.add_parser(
278 'build-windows-wheel',
316 'build-windows-wheel', help='Build Windows wheel(s)',
279 help='Build Windows wheel(s)',
280 )
317 )
281 sp.add_argument(
318 sp.add_argument(
282 '--arch',
319 '--arch',
283 help='Architecture to build for',
320 help='Architecture to build for',
284 choices={'x86', 'x64'},
321 choices={'x86', 'x64'},
285 nargs='*',
322 nargs='*',
286 default=['x64'],
323 default=['x64'],
287 )
324 )
288 sp.add_argument(
325 sp.add_argument(
289 '--revision',
326 '--revision', help='Mercurial revision to build', default='.',
290 help='Mercurial revision to build',
291 default='.',
292 )
327 )
293 sp.add_argument(
328 sp.add_argument(
294 '--base-image-name',
329 '--base-image-name',
295 help='AMI name of base image',
330 help='AMI name of base image',
296 default=aws.WINDOWS_BASE_IMAGE_NAME,
331 default=aws.WINDOWS_BASE_IMAGE_NAME,
297 )
332 )
298 sp.set_defaults(func=build_windows_wheel)
333 sp.set_defaults(func=build_windows_wheel)
299
334
300 sp = subparsers.add_parser(
335 sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
301 'build-wix',
302 help='Build WiX installer(s)'
303 )
304 sp.add_argument(
336 sp.add_argument(
305 '--arch',
337 '--arch',
306 help='Architecture to build for',
338 help='Architecture to build for',
307 choices={'x86', 'x64'},
339 choices={'x86', 'x64'},
308 nargs='*',
340 nargs='*',
309 default=['x64'],
341 default=['x64'],
310 )
342 )
311 sp.add_argument(
343 sp.add_argument(
312 '--revision',
344 '--revision', help='Mercurial revision to build', default='.',
313 help='Mercurial revision to build',
314 default='.',
315 )
345 )
316 sp.add_argument(
346 sp.add_argument(
317 '--version',
347 '--version', help='Mercurial version string to use in installer',
318 help='Mercurial version string to use in installer',
319 )
348 )
320 sp.add_argument(
349 sp.add_argument(
321 '--base-image-name',
350 '--base-image-name',
322 help='AMI name of base image',
351 help='AMI name of base image',
323 default=aws.WINDOWS_BASE_IMAGE_NAME,
352 default=aws.WINDOWS_BASE_IMAGE_NAME,
324 )
353 )
325 sp.set_defaults(func=build_wix)
354 sp.set_defaults(func=build_wix)
326
355
327 sp = subparsers.add_parser(
356 sp = subparsers.add_parser(
328 'terminate-ec2-instances',
357 'terminate-ec2-instances',
329 help='Terminate all active EC2 instances managed by us',
358 help='Terminate all active EC2 instances managed by us',
330 )
359 )
331 sp.set_defaults(func=terminate_ec2_instances)
360 sp.set_defaults(func=terminate_ec2_instances)
332
361
333 sp = subparsers.add_parser(
362 sp = subparsers.add_parser(
334 'purge-ec2-resources',
363 'purge-ec2-resources', help='Purge all EC2 resources managed by us',
335 help='Purge all EC2 resources managed by us',
336 )
364 )
337 sp.set_defaults(func=purge_ec2_resources)
365 sp.set_defaults(func=purge_ec2_resources)
338
366
339 sp = subparsers.add_parser(
367 sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
340 'run-tests-linux',
341 help='Run tests on Linux',
342 )
343 sp.add_argument(
368 sp.add_argument(
344 '--distro',
369 '--distro',
345 help='Linux distribution to run tests on',
370 help='Linux distribution to run tests on',
346 choices=linux.DISTROS,
371 choices=linux.DISTROS,
347 default='debian9',
372 default='debian10',
348 )
373 )
349 sp.add_argument(
374 sp.add_argument(
350 '--filesystem',
375 '--filesystem',
351 help='Filesystem type to use',
376 help='Filesystem type to use',
352 choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
377 choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
353 default='default',
378 default='default',
354 )
379 )
355 sp.add_argument(
380 sp.add_argument(
356 '--instance-type',
381 '--instance-type',
357 help='EC2 instance type to use',
382 help='EC2 instance type to use',
358 default='c5.9xlarge',
383 default='c5.9xlarge',
359 )
384 )
360 sp.add_argument(
385 sp.add_argument(
361 '--python-version',
386 '--python-version',
362 help='Python version to use',
387 help='Python version to use',
363 choices={'system2', 'system3', '2.7', '3.5', '3.6', '3.7', '3.8',
388 choices={
364 'pypy', 'pypy3.5', 'pypy3.6'},
389 'system2',
390 'system3',
391 '2.7',
392 '3.5',
393 '3.6',
394 '3.7',
395 '3.8',
396 'pypy',
397 'pypy3.5',
398 'pypy3.6',
399 },
365 default='system2',
400 default='system2',
366 )
401 )
367 sp.add_argument(
402 sp.add_argument(
368 'test_flags',
403 'test_flags',
369 help='Extra command line flags to pass to run-tests.py',
404 help='Extra command line flags to pass to run-tests.py',
370 nargs='*',
405 nargs='*',
371 )
406 )
372 sp.set_defaults(func=run_tests_linux)
407 sp.set_defaults(func=run_tests_linux)
373
408
374 sp = subparsers.add_parser(
409 sp = subparsers.add_parser(
375 'run-tests-windows',
410 'run-tests-windows', help='Run tests on Windows',
376 help='Run tests on Windows',
377 )
411 )
378 sp.add_argument(
412 sp.add_argument(
379 '--instance-type',
413 '--instance-type', help='EC2 instance type to use', default='t3.medium',
380 help='EC2 instance type to use',
381 default='t3.medium',
382 )
414 )
383 sp.add_argument(
415 sp.add_argument(
384 '--python-version',
416 '--python-version',
385 help='Python version to use',
417 help='Python version to use',
386 choices={'2.7', '3.5', '3.6', '3.7', '3.8'},
418 choices={'2.7', '3.5', '3.6', '3.7', '3.8'},
387 default='2.7',
419 default='2.7',
388 )
420 )
389 sp.add_argument(
421 sp.add_argument(
390 '--arch',
422 '--arch',
391 help='Architecture to test',
423 help='Architecture to test',
392 choices={'x86', 'x64'},
424 choices={'x86', 'x64'},
393 default='x64',
425 default='x64',
394 )
426 )
395 sp.add_argument(
427 sp.add_argument(
396 '--test-flags',
428 '--test-flags', help='Extra command line flags to pass to run-tests.py',
397 help='Extra command line flags to pass to run-tests.py',
398 )
429 )
399 sp.add_argument(
430 sp.add_argument(
400 '--base-image-name',
431 '--base-image-name',
401 help='AMI name of base image',
432 help='AMI name of base image',
402 default=aws.WINDOWS_BASE_IMAGE_NAME,
433 default=aws.WINDOWS_BASE_IMAGE_NAME,
403 )
434 )
404 sp.set_defaults(func=run_tests_windows)
435 sp.set_defaults(func=run_tests_windows)
405
436
437 sp = subparsers.add_parser(
438 'publish-windows-artifacts',
439 help='Publish built Windows artifacts (wheels, installers, etc)',
440 )
441 sp.add_argument(
442 '--no-pypi',
443 dest='pypi',
444 action='store_false',
445 default=True,
446 help='Skip uploading to PyPI',
447 )
448 sp.add_argument(
449 '--no-mercurial-scm-org',
450 dest='mercurial_scm_org',
451 action='store_false',
452 default=True,
453 help='Skip uploading to www.mercurial-scm.org',
454 )
455 sp.add_argument(
456 '--ssh-username', help='SSH username for mercurial-scm.org',
457 )
458 sp.add_argument(
459 'version', help='Mercurial version string to locate local packages',
460 )
461 sp.set_defaults(func=publish_windows_artifacts)
462
463 sp = subparsers.add_parser(
464 'try', help='Run CI automation against a custom changeset'
465 )
466 sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
467 sp.set_defaults(func=run_try)
468
406 return parser
469 return parser
407
470
408
471
409 def main():
472 def main():
410 parser = get_parser()
473 parser = get_parser()
411 args = parser.parse_args()
474 args = parser.parse_args()
412
475
413 local_state_path = pathlib.Path(os.path.expanduser(args.state_path))
476 local_state_path = pathlib.Path(os.path.expanduser(args.state_path))
414 automation = HGAutomation(local_state_path)
477 automation = HGAutomation(local_state_path)
415
478
416 if not hasattr(args, 'func'):
479 if not hasattr(args, 'func'):
417 parser.print_help()
480 parser.print_help()
418 return
481 return
419
482
420 kwargs = dict(vars(args))
483 kwargs = dict(vars(args))
421 del kwargs['func']
484 del kwargs['func']
422 del kwargs['state_path']
485 del kwargs['state_path']
423
486
424 args.func(automation, **kwargs)
487 args.func(automation, **kwargs)
@@ -1,551 +1,594 b''
1 # linux.py - Linux specific automation functionality
1 # linux.py - Linux specific automation functionality
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import os
10 import os
11 import pathlib
11 import pathlib
12 import shlex
12 import shlex
13 import subprocess
13 import subprocess
14 import tempfile
14 import tempfile
15
15
16 from .ssh import (
16 from .ssh import exec_command
17 exec_command,
18 )
19
17
20
18
21 # Linux distributions that are supported.
19 # Linux distributions that are supported.
22 DISTROS = {
20 DISTROS = {
23 'debian9',
21 'debian9',
22 'debian10',
24 'ubuntu18.04',
23 'ubuntu18.04',
25 'ubuntu18.10',
26 'ubuntu19.04',
24 'ubuntu19.04',
27 }
25 }
28
26
29 INSTALL_PYTHONS = r'''
27 INSTALL_PYTHONS = r'''
30 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
28 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
31 PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
29 PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8.0 pypy3.5-7.0.0 pypy3.6-7.1.1"
32
30
33 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
34 pushd /hgdev/pyenv
32 pushd /hgdev/pyenv
35 git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
33 git checkout d6d6bc8bb08bcdcbf4eb79509aa7061011ade1c4
36 popd
34 popd
37
35
38 export PYENV_ROOT="/hgdev/pyenv"
36 export PYENV_ROOT="/hgdev/pyenv"
39 export PATH="$PYENV_ROOT/bin:$PATH"
37 export PATH="$PYENV_ROOT/bin:$PATH"
40
38
41 # pip 19.0.3.
39 # pip 19.2.3.
42 PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
43 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
44 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
45
43
46 VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
47 VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
48 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
49 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
50
48
51 for v in ${PYENV2_VERSIONS}; do
49 for v in ${PYENV2_VERSIONS}; do
52 pyenv install -v ${v}
50 pyenv install -v ${v}
53 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
51 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
54 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
52 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
55 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
53 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
56 done
54 done
57
55
58 for v in ${PYENV3_VERSIONS}; do
56 for v in ${PYENV3_VERSIONS}; do
59 pyenv install -v ${v}
57 pyenv install -v ${v}
60 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
58 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
61 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
59 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
62 done
60 done
63
61
64 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
62 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
65 '''.lstrip().replace('\r\n', '\n')
63 '''.lstrip().replace(
64 '\r\n', '\n'
65 )
66
67
68 INSTALL_RUST = r'''
69 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
70 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
71 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
72
73 chmod +x rustup-init
74 sudo -H -u hg -g hg ./rustup-init -y
75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
76 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
77 '''
66
78
67
79
68 BOOTSTRAP_VIRTUALENV = r'''
80 BOOTSTRAP_VIRTUALENV = r'''
69 /usr/bin/virtualenv /hgdev/venv-bootstrap
81 /usr/bin/virtualenv /hgdev/venv-bootstrap
70
82
71 HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
83 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
72 HG_TARBALL=mercurial-4.9.1.tar.gz
84 HG_TARBALL=mercurial-5.1.1.tar.gz
73
85
74 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
86 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
75 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
87 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
76
88
77 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
89 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
78 '''.lstrip().replace('\r\n', '\n')
90 '''.lstrip().replace(
91 '\r\n', '\n'
92 )
79
93
80
94
81 BOOTSTRAP_DEBIAN = r'''
95 BOOTSTRAP_DEBIAN = (
96 r'''
82 #!/bin/bash
97 #!/bin/bash
83
98
84 set -ex
99 set -ex
85
100
86 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
101 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
87 DEBIAN_VERSION=`cat /etc/debian_version`
102 DEBIAN_VERSION=`cat /etc/debian_version`
88 LSB_RELEASE=`lsb_release -cs`
103 LSB_RELEASE=`lsb_release -cs`
89
104
90 sudo /usr/sbin/groupadd hg
105 sudo /usr/sbin/groupadd hg
91 sudo /usr/sbin/groupadd docker
106 sudo /usr/sbin/groupadd docker
92 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
107 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
93 sudo mkdir /home/hg/.ssh
108 sudo mkdir /home/hg/.ssh
94 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
109 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
95 sudo chown -R hg:hg /home/hg/.ssh
110 sudo chown -R hg:hg /home/hg/.ssh
96 sudo chmod 700 /home/hg/.ssh
111 sudo chmod 700 /home/hg/.ssh
97 sudo chmod 600 /home/hg/.ssh/authorized_keys
112 sudo chmod 600 /home/hg/.ssh/authorized_keys
98
113
99 cat << EOF | sudo tee /etc/sudoers.d/90-hg
114 cat << EOF | sudo tee /etc/sudoers.d/90-hg
100 hg ALL=(ALL) NOPASSWD:ALL
115 hg ALL=(ALL) NOPASSWD:ALL
101 EOF
116 EOF
102
117
103 sudo apt-get update
118 sudo apt-get update
104 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
119 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
105
120
106 # Install packages necessary to set up Docker Apt repo.
121 # Install packages necessary to set up Docker Apt repo.
107 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
122 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
108 apt-transport-https \
123 apt-transport-https \
109 gnupg
124 gnupg
110
125
111 cat > docker-apt-key << EOF
126 cat > docker-apt-key << EOF
112 -----BEGIN PGP PUBLIC KEY BLOCK-----
127 -----BEGIN PGP PUBLIC KEY BLOCK-----
113
128
114 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
129 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
115 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
130 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
116 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
131 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
117 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
132 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
118 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
133 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
119 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
134 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
120 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
135 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
121 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
136 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
122 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
137 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
123 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
138 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
124 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
139 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
125 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
140 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
126 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
141 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
127 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
142 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
128 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
143 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
129 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
144 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
130 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
145 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
131 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
146 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
132 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
147 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
133 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
148 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
134 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
149 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
135 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
150 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
136 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
151 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
137 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
152 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
138 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
153 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
139 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
154 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
140 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
155 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
141 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
156 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
142 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
157 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
143 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
158 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
144 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
159 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
145 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
160 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
146 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
161 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
147 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
162 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
148 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
163 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
149 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
164 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
150 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
165 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
151 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
166 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
152 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
167 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
153 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
168 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
154 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
169 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
155 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
170 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
156 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
171 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
157 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
172 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
158 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
173 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
159 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
174 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
160 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
175 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
161 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
176 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
162 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
177 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
163 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
178 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
164 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
179 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
165 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
180 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
166 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
181 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
167 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
182 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
168 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
183 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
169 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
184 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
170 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
185 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
171 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
186 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
172 =0YYh
187 =0YYh
173 -----END PGP PUBLIC KEY BLOCK-----
188 -----END PGP PUBLIC KEY BLOCK-----
174 EOF
189 EOF
175
190
176 sudo apt-key add docker-apt-key
191 sudo apt-key add docker-apt-key
177
192
178 if [ "$DEBIAN_VERSION" = "9.8" ]; then
193 if [ "$LSB_RELEASE" = "stretch" ]; then
179 cat << EOF | sudo tee -a /etc/apt/sources.list
194 cat << EOF | sudo tee -a /etc/apt/sources.list
180 # Need backports for clang-format-6.0
195 # Need backports for clang-format-6.0
181 deb http://deb.debian.org/debian stretch-backports main
196 deb http://deb.debian.org/debian stretch-backports main
197 EOF
198 fi
182
199
200 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
201 cat << EOF | sudo tee -a /etc/apt/sources.list
183 # Sources are useful if we want to compile things locally.
202 # Sources are useful if we want to compile things locally.
184 deb-src http://deb.debian.org/debian stretch main
203 deb-src http://deb.debian.org/debian $LSB_RELEASE main
185 deb-src http://security.debian.org/debian-security stretch/updates main
204 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
186 deb-src http://deb.debian.org/debian stretch-updates main
205 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
187 deb-src http://deb.debian.org/debian stretch-backports main
206 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
188
207
189 deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
208 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
190 EOF
209 EOF
191
210
192 elif [ "$DISTRO" = "Ubuntu" ]; then
211 elif [ "$DISTRO" = "Ubuntu" ]; then
193 cat << EOF | sudo tee -a /etc/apt/sources.list
212 cat << EOF | sudo tee -a /etc/apt/sources.list
194 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
213 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
195 EOF
214 EOF
196
215
197 fi
216 fi
198
217
199 sudo apt-get update
218 sudo apt-get update
200
219
201 PACKAGES="\
220 PACKAGES="\
221 awscli \
202 btrfs-progs \
222 btrfs-progs \
203 build-essential \
223 build-essential \
204 bzr \
224 bzr \
205 clang-format-6.0 \
225 clang-format-6.0 \
206 cvs \
226 cvs \
207 darcs \
227 darcs \
208 debhelper \
228 debhelper \
209 devscripts \
229 devscripts \
230 docker-ce \
210 dpkg-dev \
231 dpkg-dev \
211 dstat \
232 dstat \
212 emacs \
233 emacs \
213 gettext \
234 gettext \
214 git \
235 git \
215 htop \
236 htop \
216 iotop \
237 iotop \
217 jfsutils \
238 jfsutils \
218 libbz2-dev \
239 libbz2-dev \
219 libexpat1-dev \
240 libexpat1-dev \
220 libffi-dev \
241 libffi-dev \
221 libgdbm-dev \
242 libgdbm-dev \
222 liblzma-dev \
243 liblzma-dev \
223 libncurses5-dev \
244 libncurses5-dev \
224 libnss3-dev \
245 libnss3-dev \
225 libreadline-dev \
246 libreadline-dev \
226 libsqlite3-dev \
247 libsqlite3-dev \
227 libssl-dev \
248 libssl-dev \
228 netbase \
249 netbase \
229 ntfs-3g \
250 ntfs-3g \
230 nvme-cli \
251 nvme-cli \
231 pyflakes \
252 pyflakes \
232 pyflakes3 \
253 pyflakes3 \
233 pylint \
254 pylint \
234 pylint3 \
255 pylint3 \
235 python-all-dev \
256 python-all-dev \
236 python-dev \
257 python-dev \
237 python-docutils \
258 python-docutils \
238 python-fuzzywuzzy \
259 python-fuzzywuzzy \
239 python-pygments \
260 python-pygments \
240 python-subversion \
261 python-subversion \
241 python-vcr \
262 python-vcr \
263 python3-boto3 \
242 python3-dev \
264 python3-dev \
243 python3-docutils \
265 python3-docutils \
244 python3-fuzzywuzzy \
266 python3-fuzzywuzzy \
245 python3-pygments \
267 python3-pygments \
246 python3-vcr \
268 python3-vcr \
247 rsync \
269 rsync \
248 sqlite3 \
270 sqlite3 \
249 subversion \
271 subversion \
250 tcl-dev \
272 tcl-dev \
251 tk-dev \
273 tk-dev \
252 tla \
274 tla \
253 unzip \
275 unzip \
254 uuid-dev \
276 uuid-dev \
255 vim \
277 vim \
256 virtualenv \
278 virtualenv \
257 wget \
279 wget \
258 xfsprogs \
280 xfsprogs \
259 zip \
281 zip \
260 zlib1g-dev"
282 zlib1g-dev"
261
283
262 if [ "$DEBIAN_VERSION" = "9.8" ]; then
284 if [ "LSB_RELEASE" = "stretch" ]; then
263 PACKAGES="$PACKAGES linux-perf"
285 PACKAGES="$PACKAGES linux-perf"
264 elif [ "$DISTRO" = "Ubuntu" ]; then
286 elif [ "$DISTRO" = "Ubuntu" ]; then
265 PACKAGES="$PACKAGES linux-tools-common"
287 PACKAGES="$PACKAGES linux-tools-common"
266 fi
288 fi
267
289
268 # Ubuntu 19.04 removes monotone.
290 # Monotone only available in older releases.
269 if [ "$LSB_RELEASE" != "disco" ]; then
291 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
270 PACKAGES="$PACKAGES monotone"
292 PACKAGES="$PACKAGES monotone"
271 fi
293 fi
272
294
273 # As of April 27, 2019, Docker hasn't published packages for
274 # Ubuntu 19.04 yet.
275 if [ "$LSB_RELEASE" != "disco" ]; then
276 PACKAGES="$PACKAGES docker-ce"
277 fi
278
279 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
295 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
280
296
281 # Create clang-format symlink so test harness finds it.
297 # Create clang-format symlink so test harness finds it.
282 sudo update-alternatives --install /usr/bin/clang-format clang-format \
298 sudo update-alternatives --install /usr/bin/clang-format clang-format \
283 /usr/bin/clang-format-6.0 1000
299 /usr/bin/clang-format-6.0 1000
284
300
285 sudo mkdir /hgdev
301 sudo mkdir /hgdev
286 # Will be normalized to hg:hg later.
302 # Will be normalized to hg:hg later.
287 sudo chown `whoami` /hgdev
303 sudo chown `whoami` /hgdev
288
304
305 {install_rust}
306
289 cp requirements-py2.txt /hgdev/requirements-py2.txt
307 cp requirements-py2.txt /hgdev/requirements-py2.txt
290 cp requirements-py3.txt /hgdev/requirements-py3.txt
308 cp requirements-py3.txt /hgdev/requirements-py3.txt
291
309
292 # Disable the pip version check because it uses the network and can
310 # Disable the pip version check because it uses the network and can
293 # be annoying.
311 # be annoying.
294 cat << EOF | sudo tee -a /etc/pip.conf
312 cat << EOF | sudo tee -a /etc/pip.conf
295 [global]
313 [global]
296 disable-pip-version-check = True
314 disable-pip-version-check = True
297 EOF
315 EOF
298
316
299 {install_pythons}
317 {install_pythons}
300 {bootstrap_virtualenv}
318 {bootstrap_virtualenv}
301
319
302 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
320 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
303
321
304 # Mark the repo as non-publishing.
322 # Mark the repo as non-publishing.
305 cat >> /hgdev/src/.hg/hgrc << EOF
323 cat >> /hgdev/src/.hg/hgrc << EOF
306 [phases]
324 [phases]
307 publish = false
325 publish = false
308 EOF
326 EOF
309
327
310 sudo chown -R hg:hg /hgdev
328 sudo chown -R hg:hg /hgdev
311 '''.lstrip().format(
329 '''.lstrip()
312 install_pythons=INSTALL_PYTHONS,
330 .format(
313 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
331 install_rust=INSTALL_RUST,
314 ).replace('\r\n', '\n')
332 install_pythons=INSTALL_PYTHONS,
333 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
334 )
335 .replace('\r\n', '\n')
336 )
315
337
316
338
317 # Prepares /hgdev for operations.
339 # Prepares /hgdev for operations.
318 PREPARE_HGDEV = '''
340 PREPARE_HGDEV = '''
319 #!/bin/bash
341 #!/bin/bash
320
342
321 set -e
343 set -e
322
344
323 FS=$1
345 FS=$1
324
346
325 ensure_device() {
347 ensure_device() {
326 if [ -z "${DEVICE}" ]; then
348 if [ -z "${DEVICE}" ]; then
327 echo "could not find block device to format"
349 echo "could not find block device to format"
328 exit 1
350 exit 1
329 fi
351 fi
330 }
352 }
331
353
332 # Determine device to partition for extra filesystem.
354 # Determine device to partition for extra filesystem.
333 # If only 1 volume is present, it will be the root volume and
355 # If only 1 volume is present, it will be the root volume and
334 # should be /dev/nvme0. If multiple volumes are present, the
356 # should be /dev/nvme0. If multiple volumes are present, the
335 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
357 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
336 # a partition.
358 # a partition.
337 if [ -e /dev/nvme1n1 ]; then
359 if [ -e /dev/nvme1n1 ]; then
338 if [ -e /dev/nvme0n1p1 ]; then
360 if [ -e /dev/nvme0n1p1 ]; then
339 DEVICE=/dev/nvme1n1
361 DEVICE=/dev/nvme1n1
340 else
362 else
341 DEVICE=/dev/nvme0n1
363 DEVICE=/dev/nvme0n1
342 fi
364 fi
343 else
365 else
344 DEVICE=
366 DEVICE=
345 fi
367 fi
346
368
347 sudo mkdir /hgwork
369 sudo mkdir /hgwork
348
370
349 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
371 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
350 ensure_device
372 ensure_device
351 echo "creating ${FS} filesystem on ${DEVICE}"
373 echo "creating ${FS} filesystem on ${DEVICE}"
352 fi
374 fi
353
375
354 if [ "${FS}" = "default" ]; then
376 if [ "${FS}" = "default" ]; then
355 :
377 :
356
378
357 elif [ "${FS}" = "btrfs" ]; then
379 elif [ "${FS}" = "btrfs" ]; then
358 sudo mkfs.btrfs ${DEVICE}
380 sudo mkfs.btrfs ${DEVICE}
359 sudo mount ${DEVICE} /hgwork
381 sudo mount ${DEVICE} /hgwork
360
382
361 elif [ "${FS}" = "ext3" ]; then
383 elif [ "${FS}" = "ext3" ]; then
362 # lazy_journal_init speeds up filesystem creation at the expense of
384 # lazy_journal_init speeds up filesystem creation at the expense of
363 # integrity if things crash. We are an ephemeral instance, so we don't
385 # integrity if things crash. We are an ephemeral instance, so we don't
364 # care about integrity.
386 # care about integrity.
365 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
387 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
366 sudo mount ${DEVICE} /hgwork
388 sudo mount ${DEVICE} /hgwork
367
389
368 elif [ "${FS}" = "ext4" ]; then
390 elif [ "${FS}" = "ext4" ]; then
369 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
391 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
370 sudo mount ${DEVICE} /hgwork
392 sudo mount ${DEVICE} /hgwork
371
393
372 elif [ "${FS}" = "jfs" ]; then
394 elif [ "${FS}" = "jfs" ]; then
373 sudo mkfs.jfs ${DEVICE}
395 sudo mkfs.jfs ${DEVICE}
374 sudo mount ${DEVICE} /hgwork
396 sudo mount ${DEVICE} /hgwork
375
397
376 elif [ "${FS}" = "tmpfs" ]; then
398 elif [ "${FS}" = "tmpfs" ]; then
377 echo "creating tmpfs volume in /hgwork"
399 echo "creating tmpfs volume in /hgwork"
378 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
400 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
379
401
380 elif [ "${FS}" = "xfs" ]; then
402 elif [ "${FS}" = "xfs" ]; then
381 sudo mkfs.xfs ${DEVICE}
403 sudo mkfs.xfs ${DEVICE}
382 sudo mount ${DEVICE} /hgwork
404 sudo mount ${DEVICE} /hgwork
383
405
384 else
406 else
385 echo "unsupported filesystem: ${FS}"
407 echo "unsupported filesystem: ${FS}"
386 exit 1
408 exit 1
387 fi
409 fi
388
410
389 echo "/hgwork ready"
411 echo "/hgwork ready"
390
412
391 sudo chown hg:hg /hgwork
413 sudo chown hg:hg /hgwork
392 mkdir /hgwork/tmp
414 mkdir /hgwork/tmp
393 chown hg:hg /hgwork/tmp
415 chown hg:hg /hgwork/tmp
394
416
395 rsync -a /hgdev/src /hgwork/
417 rsync -a /hgdev/src /hgwork/
396 '''.lstrip().replace('\r\n', '\n')
418 '''.lstrip().replace(
419 '\r\n', '\n'
420 )
397
421
398
422
399 HG_UPDATE_CLEAN = '''
423 HG_UPDATE_CLEAN = '''
400 set -ex
424 set -ex
401
425
402 HG=/hgdev/venv-bootstrap/bin/hg
426 HG=/hgdev/venv-bootstrap/bin/hg
403
427
404 cd /hgwork/src
428 cd /hgwork/src
405 ${HG} --config extensions.purge= purge --all
429 ${HG} --config extensions.purge= purge --all
406 ${HG} update -C $1
430 ${HG} update -C $1
407 ${HG} log -r .
431 ${HG} log -r .
408 '''.lstrip().replace('\r\n', '\n')
432 '''.lstrip().replace(
433 '\r\n', '\n'
434 )
409
435
410
436
411 def prepare_exec_environment(ssh_client, filesystem='default'):
437 def prepare_exec_environment(ssh_client, filesystem='default'):
412 """Prepare an EC2 instance to execute things.
438 """Prepare an EC2 instance to execute things.
413
439
414 The AMI has an ``/hgdev`` bootstrapped with various Python installs
440 The AMI has an ``/hgdev`` bootstrapped with various Python installs
415 and a clone of the Mercurial repo.
441 and a clone of the Mercurial repo.
416
442
417 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
443 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
418 Notably, blocks have to be copied on first access, which makes volume
444 Notably, blocks have to be copied on first access, which makes volume
419 I/O extremely slow on fresh volumes.
445 I/O extremely slow on fresh volumes.
420
446
421 Furthermore, we may want to run operations, tests, etc on alternative
447 Furthermore, we may want to run operations, tests, etc on alternative
422 filesystems so we examine behavior on different filesystems.
448 filesystems so we examine behavior on different filesystems.
423
449
424 This function is used to facilitate executing operations on alternate
450 This function is used to facilitate executing operations on alternate
425 volumes.
451 volumes.
426 """
452 """
427 sftp = ssh_client.open_sftp()
453 sftp = ssh_client.open_sftp()
428
454
429 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
455 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
430 fh.write(PREPARE_HGDEV)
456 fh.write(PREPARE_HGDEV)
431 fh.chmod(0o0777)
457 fh.chmod(0o0777)
432
458
433 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
459 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
434 chan, stdin, stdout = exec_command(ssh_client, command)
460 chan, stdin, stdout = exec_command(ssh_client, command)
435 stdin.close()
461 stdin.close()
436
462
437 for line in stdout:
463 for line in stdout:
438 print(line, end='')
464 print(line, end='')
439
465
440 res = chan.recv_exit_status()
466 res = chan.recv_exit_status()
441
467
442 if res:
468 if res:
443 raise Exception('non-0 exit code updating working directory; %d'
469 raise Exception('non-0 exit code updating working directory; %d' % res)
444 % res)
445
470
446
471
447 def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
472 def synchronize_hg(
473 source_path: pathlib.Path, ec2_instance, revision: str = None
474 ):
448 """Synchronize a local Mercurial source path to remote EC2 instance."""
475 """Synchronize a local Mercurial source path to remote EC2 instance."""
449
476
450 with tempfile.TemporaryDirectory() as temp_dir:
477 with tempfile.TemporaryDirectory() as temp_dir:
451 temp_dir = pathlib.Path(temp_dir)
478 temp_dir = pathlib.Path(temp_dir)
452
479
453 ssh_dir = temp_dir / '.ssh'
480 ssh_dir = temp_dir / '.ssh'
454 ssh_dir.mkdir()
481 ssh_dir.mkdir()
455 ssh_dir.chmod(0o0700)
482 ssh_dir.chmod(0o0700)
456
483
457 public_ip = ec2_instance.public_ip_address
484 public_ip = ec2_instance.public_ip_address
458
485
459 ssh_config = ssh_dir / 'config'
486 ssh_config = ssh_dir / 'config'
460
487
461 with ssh_config.open('w', encoding='utf-8') as fh:
488 with ssh_config.open('w', encoding='utf-8') as fh:
462 fh.write('Host %s\n' % public_ip)
489 fh.write('Host %s\n' % public_ip)
463 fh.write(' User hg\n')
490 fh.write(' User hg\n')
464 fh.write(' StrictHostKeyChecking no\n')
491 fh.write(' StrictHostKeyChecking no\n')
465 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
492 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
466 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
493 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
467
494
468 if not (source_path / '.hg').is_dir():
495 if not (source_path / '.hg').is_dir():
469 raise Exception('%s is not a Mercurial repository; synchronization '
496 raise Exception(
470 'not yet supported' % source_path)
497 '%s is not a Mercurial repository; synchronization '
498 'not yet supported' % source_path
499 )
471
500
472 env = dict(os.environ)
501 env = dict(os.environ)
473 env['HGPLAIN'] = '1'
502 env['HGPLAIN'] = '1'
474 env['HGENCODING'] = 'utf-8'
503 env['HGENCODING'] = 'utf-8'
475
504
476 hg_bin = source_path / 'hg'
505 hg_bin = source_path / 'hg'
477
506
478 res = subprocess.run(
507 res = subprocess.run(
479 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
508 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
480 cwd=str(source_path), env=env, check=True, capture_output=True)
509 cwd=str(source_path),
510 env=env,
511 check=True,
512 capture_output=True,
513 )
481
514
482 full_revision = res.stdout.decode('ascii')
515 full_revision = res.stdout.decode('ascii')
483
516
484 args = [
517 args = [
485 'python2.7', str(hg_bin),
518 'python2.7',
486 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
519 str(hg_bin),
487 '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
520 '--config',
521 'ui.ssh=ssh -F %s' % ssh_config,
522 '--config',
523 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
488 # Also ensure .hgtags changes are present so auto version
524 # Also ensure .hgtags changes are present so auto version
489 # calculation works.
525 # calculation works.
490 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
526 'push',
527 '-f',
528 '-r',
529 full_revision,
530 '-r',
531 'file(.hgtags)',
491 'ssh://%s//hgwork/src' % public_ip,
532 'ssh://%s//hgwork/src' % public_ip,
492 ]
533 ]
493
534
494 res = subprocess.run(args, cwd=str(source_path), env=env)
535 res = subprocess.run(args, cwd=str(source_path), env=env)
495
536
496 # Allow 1 (no-op) to not trigger error.
537 # Allow 1 (no-op) to not trigger error.
497 if res.returncode not in (0, 1):
538 if res.returncode not in (0, 1):
498 res.check_returncode()
539 res.check_returncode()
499
540
500 # TODO support synchronizing dirty working directory.
541 # TODO support synchronizing dirty working directory.
501
542
502 sftp = ec2_instance.ssh_client.open_sftp()
543 sftp = ec2_instance.ssh_client.open_sftp()
503
544
504 with sftp.open('/hgdev/hgup', 'wb') as fh:
545 with sftp.open('/hgdev/hgup', 'wb') as fh:
505 fh.write(HG_UPDATE_CLEAN)
546 fh.write(HG_UPDATE_CLEAN)
506 fh.chmod(0o0700)
547 fh.chmod(0o0700)
507
548
508 chan, stdin, stdout = exec_command(
549 chan, stdin, stdout = exec_command(
509 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
550 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
551 )
510 stdin.close()
552 stdin.close()
511
553
512 for line in stdout:
554 for line in stdout:
513 print(line, end='')
555 print(line, end='')
514
556
515 res = chan.recv_exit_status()
557 res = chan.recv_exit_status()
516
558
517 if res:
559 if res:
518 raise Exception('non-0 exit code updating working directory; %d'
560 raise Exception(
519 % res)
561 'non-0 exit code updating working directory; %d' % res
562 )
520
563
521
564
522 def run_tests(ssh_client, python_version, test_flags=None):
565 def run_tests(ssh_client, python_version, test_flags=None):
523 """Run tests on a remote Linux machine via an SSH client."""
566 """Run tests on a remote Linux machine via an SSH client."""
524 test_flags = test_flags or []
567 test_flags = test_flags or []
525
568
526 print('running tests')
569 print('running tests')
527
570
528 if python_version == 'system2':
571 if python_version == 'system2':
529 python = '/usr/bin/python2'
572 python = '/usr/bin/python2'
530 elif python_version == 'system3':
573 elif python_version == 'system3':
531 python = '/usr/bin/python3'
574 python = '/usr/bin/python3'
532 elif python_version.startswith('pypy'):
575 elif python_version.startswith('pypy'):
533 python = '/hgdev/pyenv/shims/%s' % python_version
576 python = '/hgdev/pyenv/shims/%s' % python_version
534 else:
577 else:
535 python = '/hgdev/pyenv/shims/python%s' % python_version
578 python = '/hgdev/pyenv/shims/python%s' % python_version
536
579
537 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
580 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
538
581
539 command = (
582 command = (
540 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
583 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
541 'cd /hgwork/src/tests && %s run-tests.py %s"' % (
584 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
542 python, test_flags))
585 )
543
586
544 chan, stdin, stdout = exec_command(ssh_client, command)
587 chan, stdin, stdout = exec_command(ssh_client, command)
545
588
546 stdin.close()
589 stdin.close()
547
590
548 for line in stdout:
591 for line in stdout:
549 print(line, end='')
592 print(line, end='')
550
593
551 return chan.recv_exit_status()
594 return chan.recv_exit_status()
@@ -1,67 +1,72 b''
1 # ssh.py - Interact with remote SSH servers
1 # ssh.py - Interact with remote SSH servers
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import socket
10 import socket
11 import time
11 import time
12 import warnings
12 import warnings
13
13
14 from cryptography.utils import (
14 from cryptography.utils import CryptographyDeprecationWarning
15 CryptographyDeprecationWarning,
16 )
17 import paramiko
15 import paramiko
18
16
19
17
20 def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
18 def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
21 """Wait for an SSH server to start on the specified host and port."""
19 """Wait for an SSH server to start on the specified host and port."""
20
22 class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
21 class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
23 def missing_host_key(self, client, hostname, key):
22 def missing_host_key(self, client, hostname, key):
24 return
23 return
25
24
26 end_time = time.time() + timeout
25 end_time = time.time() + timeout
27
26
28 # paramiko triggers a CryptographyDeprecationWarning in the cryptography
27 # paramiko triggers a CryptographyDeprecationWarning in the cryptography
29 # package. Let's suppress
28 # package. Let's suppress
30 with warnings.catch_warnings():
29 with warnings.catch_warnings():
31 warnings.filterwarnings('ignore',
30 warnings.filterwarnings(
32 category=CryptographyDeprecationWarning)
31 'ignore', category=CryptographyDeprecationWarning
32 )
33
33
34 while True:
34 while True:
35 client = paramiko.SSHClient()
35 client = paramiko.SSHClient()
36 client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
36 client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
37 try:
37 try:
38 client.connect(hostname, port=port, username=username,
38 client.connect(
39 key_filename=key_filename,
39 hostname,
40 timeout=5.0, allow_agent=False,
40 port=port,
41 look_for_keys=False)
41 username=username,
42 key_filename=key_filename,
43 timeout=5.0,
44 allow_agent=False,
45 look_for_keys=False,
46 )
42
47
43 return client
48 return client
44 except socket.error:
49 except socket.error:
45 pass
50 pass
46 except paramiko.AuthenticationException:
51 except paramiko.AuthenticationException:
47 raise
52 raise
48 except paramiko.SSHException:
53 except paramiko.SSHException:
49 pass
54 pass
50
55
51 if time.time() >= end_time:
56 if time.time() >= end_time:
52 raise Exception('Timeout reached waiting for SSH')
57 raise Exception('Timeout reached waiting for SSH')
53
58
54 time.sleep(1.0)
59 time.sleep(1.0)
55
60
56
61
57 def exec_command(client, command):
62 def exec_command(client, command):
58 """exec_command wrapper that combines stderr/stdout and returns channel"""
63 """exec_command wrapper that combines stderr/stdout and returns channel"""
59 chan = client.get_transport().open_session()
64 chan = client.get_transport().open_session()
60
65
61 chan.exec_command(command)
66 chan.exec_command(command)
62 chan.set_combine_stderr(True)
67 chan.set_combine_stderr(True)
63
68
64 stdin = chan.makefile('wb', -1)
69 stdin = chan.makefile('wb', -1)
65 stdout = chan.makefile('r', -1)
70 stdout = chan.makefile('r', -1)
66
71
67 return chan, stdin, stdout
72 return chan, stdin, stdout
@@ -1,298 +1,510 b''
1 # windows.py - Automation specific to Windows
1 # windows.py - Automation specific to Windows
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import datetime
10 import os
11 import os
12 import paramiko
11 import pathlib
13 import pathlib
12 import re
14 import re
13 import subprocess
15 import subprocess
14 import tempfile
16 import tempfile
15
17
16 from .winrm import (
18 from .pypi import upload as pypi_upload
17 run_powershell,
19 from .winrm import run_powershell
18 )
19
20
20
21
21 # PowerShell commands to activate a Visual Studio 2008 environment.
22 # PowerShell commands to activate a Visual Studio 2008 environment.
22 # This is essentially a port of vcvarsall.bat to PowerShell.
23 # This is essentially a port of vcvarsall.bat to PowerShell.
23 ACTIVATE_VC9_AMD64 = r'''
24 ACTIVATE_VC9_AMD64 = r'''
24 Write-Output "activating Visual Studio 2008 environment for AMD64"
25 Write-Output "activating Visual Studio 2008 environment for AMD64"
25 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
26 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
26 $Env:VCINSTALLDIR = "${root}\VC\"
27 $Env:VCINSTALLDIR = "${root}\VC\"
27 $Env:WindowsSdkDir = "${root}\WinSDK\"
28 $Env:WindowsSdkDir = "${root}\WinSDK\"
28 $Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
29 $Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
29 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
30 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
30 $Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
31 $Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
31 $Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
32 $Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
32 '''.lstrip()
33 '''.lstrip()
33
34
34 ACTIVATE_VC9_X86 = r'''
35 ACTIVATE_VC9_X86 = r'''
35 Write-Output "activating Visual Studio 2008 environment for x86"
36 Write-Output "activating Visual Studio 2008 environment for x86"
36 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
37 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
37 $Env:VCINSTALLDIR = "${root}\VC\"
38 $Env:VCINSTALLDIR = "${root}\VC\"
38 $Env:WindowsSdkDir = "${root}\WinSDK\"
39 $Env:WindowsSdkDir = "${root}\WinSDK\"
39 $Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
40 $Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
40 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
41 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
41 $Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
42 $Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
42 $Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
43 $Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
43 '''.lstrip()
44 '''.lstrip()
44
45
45 HG_PURGE = r'''
46 HG_PURGE = r'''
46 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
47 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
47 Set-Location C:\hgdev\src
48 Set-Location C:\hgdev\src
48 hg.exe --config extensions.purge= purge --all
49 hg.exe --config extensions.purge= purge --all
49 if ($LASTEXITCODE -ne 0) {
50 if ($LASTEXITCODE -ne 0) {
50 throw "process exited non-0: $LASTEXITCODE"
51 throw "process exited non-0: $LASTEXITCODE"
51 }
52 }
52 Write-Output "purged Mercurial repo"
53 Write-Output "purged Mercurial repo"
53 '''
54 '''
54
55
55 HG_UPDATE_CLEAN = r'''
56 HG_UPDATE_CLEAN = r'''
56 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
57 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
57 Set-Location C:\hgdev\src
58 Set-Location C:\hgdev\src
58 hg.exe --config extensions.purge= purge --all
59 hg.exe --config extensions.purge= purge --all
59 if ($LASTEXITCODE -ne 0) {{
60 if ($LASTEXITCODE -ne 0) {{
60 throw "process exited non-0: $LASTEXITCODE"
61 throw "process exited non-0: $LASTEXITCODE"
61 }}
62 }}
62 hg.exe update -C {revision}
63 hg.exe update -C {revision}
63 if ($LASTEXITCODE -ne 0) {{
64 if ($LASTEXITCODE -ne 0) {{
64 throw "process exited non-0: $LASTEXITCODE"
65 throw "process exited non-0: $LASTEXITCODE"
65 }}
66 }}
66 hg.exe log -r .
67 hg.exe log -r .
67 Write-Output "updated Mercurial working directory to {revision}"
68 Write-Output "updated Mercurial working directory to {revision}"
68 '''.lstrip()
69 '''.lstrip()
69
70
70 BUILD_INNO = r'''
71 BUILD_INNO = r'''
71 Set-Location C:\hgdev\src
72 Set-Location C:\hgdev\src
72 $python = "C:\hgdev\python27-{arch}\python.exe"
73 $python = "C:\hgdev\python27-{arch}\python.exe"
73 C:\hgdev\python37-x64\python.exe contrib\packaging\inno\build.py --python $python
74 C:\hgdev\python37-x64\python.exe contrib\packaging\inno\build.py --python $python
74 if ($LASTEXITCODE -ne 0) {{
75 if ($LASTEXITCODE -ne 0) {{
75 throw "process exited non-0: $LASTEXITCODE"
76 throw "process exited non-0: $LASTEXITCODE"
76 }}
77 }}
77 '''.lstrip()
78 '''.lstrip()
78
79
79 BUILD_WHEEL = r'''
80 BUILD_WHEEL = r'''
80 Set-Location C:\hgdev\src
81 Set-Location C:\hgdev\src
81 C:\hgdev\python27-{arch}\Scripts\pip.exe wheel --wheel-dir dist .
82 C:\hgdev\python27-{arch}\Scripts\pip.exe wheel --wheel-dir dist .
82 if ($LASTEXITCODE -ne 0) {{
83 if ($LASTEXITCODE -ne 0) {{
83 throw "process exited non-0: $LASTEXITCODE"
84 throw "process exited non-0: $LASTEXITCODE"
84 }}
85 }}
85 '''
86 '''
86
87
87 BUILD_WIX = r'''
88 BUILD_WIX = r'''
88 Set-Location C:\hgdev\src
89 Set-Location C:\hgdev\src
89 $python = "C:\hgdev\python27-{arch}\python.exe"
90 $python = "C:\hgdev\python27-{arch}\python.exe"
90 C:\hgdev\python37-x64\python.exe contrib\packaging\wix\build.py --python $python {extra_args}
91 C:\hgdev\python37-x64\python.exe contrib\packaging\wix\build.py --python $python {extra_args}
91 if ($LASTEXITCODE -ne 0) {{
92 if ($LASTEXITCODE -ne 0) {{
92 throw "process exited non-0: $LASTEXITCODE"
93 throw "process exited non-0: $LASTEXITCODE"
93 }}
94 }}
94 '''
95 '''
95
96
96 RUN_TESTS = r'''
97 RUN_TESTS = r'''
97 C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
98 C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
98 if ($LASTEXITCODE -ne 0) {{
99 if ($LASTEXITCODE -ne 0) {{
99 throw "process exited non-0: $LASTEXITCODE"
100 throw "process exited non-0: $LASTEXITCODE"
100 }}
101 }}
101 '''
102 '''
102
103
104 X86_WHEEL_FILENAME = 'mercurial-{version}-cp27-cp27m-win32.whl'
105 X64_WHEEL_FILENAME = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
106 X86_EXE_FILENAME = 'Mercurial-{version}.exe'
107 X64_EXE_FILENAME = 'Mercurial-{version}-x64.exe'
108 X86_MSI_FILENAME = 'mercurial-{version}-x86.msi'
109 X64_MSI_FILENAME = 'mercurial-{version}-x64.msi'
110
111 MERCURIAL_SCM_BASE_URL = 'https://mercurial-scm.org/release/windows'
112
113 X86_USER_AGENT_PATTERN = '.*Windows.*'
114 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
115
116 X86_EXE_DESCRIPTION = (
117 'Mercurial {version} Inno Setup installer - x86 Windows '
118 '- does not require admin rights'
119 )
120 X64_EXE_DESCRIPTION = (
121 'Mercurial {version} Inno Setup installer - x64 Windows '
122 '- does not require admin rights'
123 )
124 X86_MSI_DESCRIPTION = (
125 'Mercurial {version} MSI installer - x86 Windows ' '- requires admin rights'
126 )
127 X64_MSI_DESCRIPTION = (
128 'Mercurial {version} MSI installer - x64 Windows ' '- requires admin rights'
129 )
130
103
131
104 def get_vc_prefix(arch):
132 def get_vc_prefix(arch):
105 if arch == 'x86':
133 if arch == 'x86':
106 return ACTIVATE_VC9_X86
134 return ACTIVATE_VC9_X86
107 elif arch == 'x64':
135 elif arch == 'x64':
108 return ACTIVATE_VC9_AMD64
136 return ACTIVATE_VC9_AMD64
109 else:
137 else:
110 raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
138 raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
111
139
112
140
113 def fix_authorized_keys_permissions(winrm_client, path):
141 def fix_authorized_keys_permissions(winrm_client, path):
114 commands = [
142 commands = [
115 '$ErrorActionPreference = "Stop"',
143 '$ErrorActionPreference = "Stop"',
116 'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path,
144 'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path,
117 r'icacls %s /remove:g "NT Service\sshd"' % path,
145 r'icacls %s /remove:g "NT Service\sshd"' % path,
118 ]
146 ]
119
147
120 run_powershell(winrm_client, '\n'.join(commands))
148 run_powershell(winrm_client, '\n'.join(commands))
121
149
122
150
123 def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance):
151 def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance):
124 """Synchronize local Mercurial repo to remote EC2 instance."""
152 """Synchronize local Mercurial repo to remote EC2 instance."""
125
153
126 winrm_client = ec2_instance.winrm_client
154 winrm_client = ec2_instance.winrm_client
127
155
128 with tempfile.TemporaryDirectory() as temp_dir:
156 with tempfile.TemporaryDirectory() as temp_dir:
129 temp_dir = pathlib.Path(temp_dir)
157 temp_dir = pathlib.Path(temp_dir)
130
158
131 ssh_dir = temp_dir / '.ssh'
159 ssh_dir = temp_dir / '.ssh'
132 ssh_dir.mkdir()
160 ssh_dir.mkdir()
133 ssh_dir.chmod(0o0700)
161 ssh_dir.chmod(0o0700)
134
162
135 # Generate SSH key to use for communication.
163 # Generate SSH key to use for communication.
136 subprocess.run([
164 subprocess.run(
137 'ssh-keygen', '-t', 'rsa', '-b', '4096', '-N', '',
165 [
138 '-f', str(ssh_dir / 'id_rsa')],
166 'ssh-keygen',
139 check=True, capture_output=True)
167 '-t',
168 'rsa',
169 '-b',
170 '4096',
171 '-N',
172 '',
173 '-f',
174 str(ssh_dir / 'id_rsa'),
175 ],
176 check=True,
177 capture_output=True,
178 )
140
179
141 # Add it to ~/.ssh/authorized_keys on remote.
180 # Add it to ~/.ssh/authorized_keys on remote.
142 # This assumes the file doesn't already exist.
181 # This assumes the file doesn't already exist.
143 authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys'
182 authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys'
144 winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh')
183 winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh')
145 winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys)
184 winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys)
146 fix_authorized_keys_permissions(winrm_client, authorized_keys)
185 fix_authorized_keys_permissions(winrm_client, authorized_keys)
147
186
148 public_ip = ec2_instance.public_ip_address
187 public_ip = ec2_instance.public_ip_address
149
188
150 ssh_config = temp_dir / '.ssh' / 'config'
189 ssh_config = temp_dir / '.ssh' / 'config'
151
190
152 with open(ssh_config, 'w', encoding='utf-8') as fh:
191 with open(ssh_config, 'w', encoding='utf-8') as fh:
153 fh.write('Host %s\n' % public_ip)
192 fh.write('Host %s\n' % public_ip)
154 fh.write(' User Administrator\n')
193 fh.write(' User Administrator\n')
155 fh.write(' StrictHostKeyChecking no\n')
194 fh.write(' StrictHostKeyChecking no\n')
156 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
195 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
157 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
196 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
158
197
159 if not (hg_repo / '.hg').is_dir():
198 if not (hg_repo / '.hg').is_dir():
160 raise Exception('%s is not a Mercurial repository; '
199 raise Exception(
161 'synchronization not yet supported' % hg_repo)
200 '%s is not a Mercurial repository; '
201 'synchronization not yet supported' % hg_repo
202 )
162
203
163 env = dict(os.environ)
204 env = dict(os.environ)
164 env['HGPLAIN'] = '1'
205 env['HGPLAIN'] = '1'
165 env['HGENCODING'] = 'utf-8'
206 env['HGENCODING'] = 'utf-8'
166
207
167 hg_bin = hg_repo / 'hg'
208 hg_bin = hg_repo / 'hg'
168
209
169 res = subprocess.run(
210 res = subprocess.run(
170 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
211 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
171 cwd=str(hg_repo), env=env, check=True, capture_output=True)
212 cwd=str(hg_repo),
213 env=env,
214 check=True,
215 capture_output=True,
216 )
172
217
173 full_revision = res.stdout.decode('ascii')
218 full_revision = res.stdout.decode('ascii')
174
219
175 args = [
220 args = [
176 'python2.7', hg_bin,
221 'python2.7',
177 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
222 hg_bin,
178 '--config', 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
223 '--config',
224 'ui.ssh=ssh -F %s' % ssh_config,
225 '--config',
226 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
179 # Also ensure .hgtags changes are present so auto version
227 # Also ensure .hgtags changes are present so auto version
180 # calculation works.
228 # calculation works.
181 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
229 'push',
230 '-f',
231 '-r',
232 full_revision,
233 '-r',
234 'file(.hgtags)',
182 'ssh://%s/c:/hgdev/src' % public_ip,
235 'ssh://%s/c:/hgdev/src' % public_ip,
183 ]
236 ]
184
237
185 res = subprocess.run(args, cwd=str(hg_repo), env=env)
238 res = subprocess.run(args, cwd=str(hg_repo), env=env)
186
239
187 # Allow 1 (no-op) to not trigger error.
240 # Allow 1 (no-op) to not trigger error.
188 if res.returncode not in (0, 1):
241 if res.returncode not in (0, 1):
189 res.check_returncode()
242 res.check_returncode()
190
243
191 run_powershell(winrm_client,
244 run_powershell(
192 HG_UPDATE_CLEAN.format(revision=full_revision))
245 winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
246 )
193
247
194 # TODO detect dirty local working directory and synchronize accordingly.
248 # TODO detect dirty local working directory and synchronize accordingly.
195
249
196
250
197 def purge_hg(winrm_client):
251 def purge_hg(winrm_client):
198 """Purge the Mercurial source repository on an EC2 instance."""
252 """Purge the Mercurial source repository on an EC2 instance."""
199 run_powershell(winrm_client, HG_PURGE)
253 run_powershell(winrm_client, HG_PURGE)
200
254
201
255
202 def find_latest_dist(winrm_client, pattern):
256 def find_latest_dist(winrm_client, pattern):
203 """Find path to newest file in dist/ directory matching a pattern."""
257 """Find path to newest file in dist/ directory matching a pattern."""
204
258
205 res = winrm_client.execute_ps(
259 res = winrm_client.execute_ps(
206 r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" '
260 r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" '
207 '| Sort-Object LastWriteTime -Descending '
261 '| Sort-Object LastWriteTime -Descending '
208 '| Select-Object -First 1\n'
262 '| Select-Object -First 1\n'
209 '$v.name' % pattern
263 '$v.name' % pattern
210 )
264 )
211 return res[0]
265 return res[0]
212
266
213
267
214 def copy_latest_dist(winrm_client, pattern, dest_path):
268 def copy_latest_dist(winrm_client, pattern, dest_path):
215 """Copy latest file matching pattern in dist/ directory.
269 """Copy latest file matching pattern in dist/ directory.
216
270
217 Given a WinRM client and a file pattern, find the latest file on the remote
271 Given a WinRM client and a file pattern, find the latest file on the remote
218 matching that pattern and copy it to the ``dest_path`` directory on the
272 matching that pattern and copy it to the ``dest_path`` directory on the
219 local machine.
273 local machine.
220 """
274 """
221 latest = find_latest_dist(winrm_client, pattern)
275 latest = find_latest_dist(winrm_client, pattern)
222 source = r'C:\hgdev\src\dist\%s' % latest
276 source = r'C:\hgdev\src\dist\%s' % latest
223 dest = dest_path / latest
277 dest = dest_path / latest
224 print('copying %s to %s' % (source, dest))
278 print('copying %s to %s' % (source, dest))
225 winrm_client.fetch(source, str(dest))
279 winrm_client.fetch(source, str(dest))
226
280
227
281
228 def build_inno_installer(winrm_client, arch: str, dest_path: pathlib.Path,
282 def build_inno_installer(
229 version=None):
283 winrm_client, arch: str, dest_path: pathlib.Path, version=None
284 ):
230 """Build the Inno Setup installer on a remote machine.
285 """Build the Inno Setup installer on a remote machine.
231
286
232 Using a WinRM client, remote commands are executed to build
287 Using a WinRM client, remote commands are executed to build
233 a Mercurial Inno Setup installer.
288 a Mercurial Inno Setup installer.
234 """
289 """
235 print('building Inno Setup installer for %s' % arch)
290 print('building Inno Setup installer for %s' % arch)
236
291
237 extra_args = []
292 extra_args = []
238 if version:
293 if version:
239 extra_args.extend(['--version', version])
294 extra_args.extend(['--version', version])
240
295
241 ps = get_vc_prefix(arch) + BUILD_INNO.format(arch=arch,
296 ps = get_vc_prefix(arch) + BUILD_INNO.format(
242 extra_args=' '.join(extra_args))
297 arch=arch, extra_args=' '.join(extra_args)
298 )
243 run_powershell(winrm_client, ps)
299 run_powershell(winrm_client, ps)
244 copy_latest_dist(winrm_client, '*.exe', dest_path)
300 copy_latest_dist(winrm_client, '*.exe', dest_path)
245
301
246
302
247 def build_wheel(winrm_client, arch: str, dest_path: pathlib.Path):
303 def build_wheel(winrm_client, arch: str, dest_path: pathlib.Path):
248 """Build Python wheels on a remote machine.
304 """Build Python wheels on a remote machine.
249
305
250 Using a WinRM client, remote commands are executed to build a Python wheel
306 Using a WinRM client, remote commands are executed to build a Python wheel
251 for Mercurial.
307 for Mercurial.
252 """
308 """
253 print('Building Windows wheel for %s' % arch)
309 print('Building Windows wheel for %s' % arch)
254 ps = get_vc_prefix(arch) + BUILD_WHEEL.format(arch=arch)
310 ps = get_vc_prefix(arch) + BUILD_WHEEL.format(arch=arch)
255 run_powershell(winrm_client, ps)
311 run_powershell(winrm_client, ps)
256 copy_latest_dist(winrm_client, '*.whl', dest_path)
312 copy_latest_dist(winrm_client, '*.whl', dest_path)
257
313
258
314
259 def build_wix_installer(winrm_client, arch: str, dest_path: pathlib.Path,
315 def build_wix_installer(
260 version=None):
316 winrm_client, arch: str, dest_path: pathlib.Path, version=None
317 ):
261 """Build the WiX installer on a remote machine.
318 """Build the WiX installer on a remote machine.
262
319
263 Using a WinRM client, remote commands are executed to build a WiX installer.
320 Using a WinRM client, remote commands are executed to build a WiX installer.
264 """
321 """
265 print('Building WiX installer for %s' % arch)
322 print('Building WiX installer for %s' % arch)
266 extra_args = []
323 extra_args = []
267 if version:
324 if version:
268 extra_args.extend(['--version', version])
325 extra_args.extend(['--version', version])
269
326
270 ps = get_vc_prefix(arch) + BUILD_WIX.format(arch=arch,
327 ps = get_vc_prefix(arch) + BUILD_WIX.format(
271 extra_args=' '.join(extra_args))
328 arch=arch, extra_args=' '.join(extra_args)
329 )
272 run_powershell(winrm_client, ps)
330 run_powershell(winrm_client, ps)
273 copy_latest_dist(winrm_client, '*.msi', dest_path)
331 copy_latest_dist(winrm_client, '*.msi', dest_path)
274
332
275
333
276 def run_tests(winrm_client, python_version, arch, test_flags=''):
334 def run_tests(winrm_client, python_version, arch, test_flags=''):
277 """Run tests on a remote Windows machine.
335 """Run tests on a remote Windows machine.
278
336
279 ``python_version`` is a ``X.Y`` string like ``2.7`` or ``3.7``.
337 ``python_version`` is a ``X.Y`` string like ``2.7`` or ``3.7``.
280 ``arch`` is ``x86`` or ``x64``.
338 ``arch`` is ``x86`` or ``x64``.
281 ``test_flags`` is a str representing extra arguments to pass to
339 ``test_flags`` is a str representing extra arguments to pass to
282 ``run-tests.py``.
340 ``run-tests.py``.
283 """
341 """
284 if not re.match(r'\d\.\d', python_version):
342 if not re.match(r'\d\.\d', python_version):
285 raise ValueError(r'python_version must be \d.\d; got %s' %
343 raise ValueError(
286 python_version)
344 r'python_version must be \d.\d; got %s' % python_version
345 )
287
346
288 if arch not in ('x86', 'x64'):
347 if arch not in ('x86', 'x64'):
289 raise ValueError('arch must be x86 or x64; got %s' % arch)
348 raise ValueError('arch must be x86 or x64; got %s' % arch)
290
349
291 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
350 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
292
351
293 ps = RUN_TESTS.format(
352 ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
294 python_path=python_path,
353
295 test_flags=test_flags or '',
354 run_powershell(winrm_client, ps)
355
356
357 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
358 return (
359 dist_path / X86_WHEEL_FILENAME.format(version=version),
360 dist_path / X64_WHEEL_FILENAME.format(version=version),
361 )
362
363
364 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
365 return (
366 dist_path / X86_WHEEL_FILENAME.format(version=version),
367 dist_path / X64_WHEEL_FILENAME.format(version=version),
368 dist_path / X86_EXE_FILENAME.format(version=version),
369 dist_path / X64_EXE_FILENAME.format(version=version),
370 dist_path / X86_MSI_FILENAME.format(version=version),
371 dist_path / X64_MSI_FILENAME.format(version=version),
372 )
373
374
375 def generate_latest_dat(version: str):
376 x86_exe_filename = X86_EXE_FILENAME.format(version=version)
377 x64_exe_filename = X64_EXE_FILENAME.format(version=version)
378 x86_msi_filename = X86_MSI_FILENAME.format(version=version)
379 x64_msi_filename = X64_MSI_FILENAME.format(version=version)
380
381 entries = (
382 (
383 '10',
384 version,
385 X86_USER_AGENT_PATTERN,
386 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x86_exe_filename),
387 X86_EXE_DESCRIPTION.format(version=version),
388 ),
389 (
390 '10',
391 version,
392 X64_USER_AGENT_PATTERN,
393 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_exe_filename),
394 X64_EXE_DESCRIPTION.format(version=version),
395 ),
396 (
397 '10',
398 version,
399 X86_USER_AGENT_PATTERN,
400 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x86_msi_filename),
401 X86_MSI_DESCRIPTION.format(version=version),
402 ),
403 (
404 '10',
405 version,
406 X64_USER_AGENT_PATTERN,
407 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_msi_filename),
408 X64_MSI_DESCRIPTION.format(version=version),
409 ),
296 )
410 )
297
411
298 run_powershell(winrm_client, ps)
412 lines = ['\t'.join(e) for e in entries]
413
414 return '\n'.join(lines) + '\n'
415
416
417 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
418 """Publish Windows release artifacts to PyPI."""
419
420 wheel_paths = resolve_wheel_artifacts(dist_path, version)
421
422 for p in wheel_paths:
423 if not p.exists():
424 raise Exception('%s not found' % p)
425
426 print('uploading wheels to PyPI (you may be prompted for credentials)')
427 pypi_upload(wheel_paths)
428
429
430 def publish_artifacts_mercurial_scm_org(
431 dist_path: pathlib.Path, version: str, ssh_username=None
432 ):
433 """Publish Windows release artifacts to mercurial-scm.org."""
434 all_paths = resolve_all_artifacts(dist_path, version)
435
436 for p in all_paths:
437 if not p.exists():
438 raise Exception('%s not found' % p)
439
440 client = paramiko.SSHClient()
441 client.load_system_host_keys()
442 # We assume the system SSH configuration knows how to connect.
443 print('connecting to mercurial-scm.org via ssh...')
444 try:
445 client.connect('mercurial-scm.org', username=ssh_username)
446 except paramiko.AuthenticationException:
447 print('error authenticating; is an SSH key available in an SSH agent?')
448 raise
449
450 print('SSH connection established')
451
452 print('opening SFTP client...')
453 sftp = client.open_sftp()
454 print('SFTP client obtained')
455
456 for p in all_paths:
457 dest_path = '/var/www/release/windows/%s' % p.name
458 print('uploading %s to %s' % (p, dest_path))
459
460 with p.open('rb') as fh:
461 data = fh.read()
462
463 with sftp.open(dest_path, 'wb') as fh:
464 fh.write(data)
465 fh.chmod(0o0664)
466
467 latest_dat_path = '/var/www/release/windows/latest.dat'
468
469 now = datetime.datetime.utcnow()
470 backup_path = dist_path / (
471 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
472 )
473 print('backing up %s to %s' % (latest_dat_path, backup_path))
474
475 with sftp.open(latest_dat_path, 'rb') as fh:
476 latest_dat_old = fh.read()
477
478 with backup_path.open('wb') as fh:
479 fh.write(latest_dat_old)
480
481 print('writing %s with content:' % latest_dat_path)
482 latest_dat_content = generate_latest_dat(version)
483 print(latest_dat_content)
484
485 with sftp.open(latest_dat_path, 'wb') as fh:
486 fh.write(latest_dat_content.encode('ascii'))
487
488
489 def publish_artifacts(
490 dist_path: pathlib.Path,
491 version: str,
492 pypi=True,
493 mercurial_scm_org=True,
494 ssh_username=None,
495 ):
496 """Publish Windows release artifacts.
497
498 Files are found in `dist_path`. We will look for files with version string
499 `version`.
500
501 `pypi` controls whether we upload to PyPI.
502 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
503 """
504 if pypi:
505 publish_artifacts_pypi(dist_path, version)
506
507 if mercurial_scm_org:
508 publish_artifacts_mercurial_scm_org(
509 dist_path, version, ssh_username=ssh_username
510 )
@@ -1,82 +1,87 b''
1 # winrm.py - Interact with Windows Remote Management (WinRM)
1 # winrm.py - Interact with Windows Remote Management (WinRM)
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import logging
10 import logging
11 import pprint
11 import pprint
12 import time
12 import time
13
13
14 from pypsrp.client import (
14 from pypsrp.client import Client
15 Client,
16 )
17 from pypsrp.powershell import (
15 from pypsrp.powershell import (
18 PowerShell,
16 PowerShell,
19 PSInvocationState,
17 PSInvocationState,
20 RunspacePool,
18 RunspacePool,
21 )
19 )
22 import requests.exceptions
20 import requests.exceptions
23
21
24
22
25 logger = logging.getLogger(__name__)
23 logger = logging.getLogger(__name__)
26
24
27
25
28 def wait_for_winrm(host, username, password, timeout=180, ssl=False):
26 def wait_for_winrm(host, username, password, timeout=180, ssl=False):
29 """Wait for the Windows Remoting (WinRM) service to become available.
27 """Wait for the Windows Remoting (WinRM) service to become available.
30
28
31 Returns a ``psrpclient.Client`` instance.
29 Returns a ``psrpclient.Client`` instance.
32 """
30 """
33
31
34 end_time = time.time() + timeout
32 end_time = time.time() + timeout
35
33
36 while True:
34 while True:
37 try:
35 try:
38 client = Client(host, username=username, password=password,
36 client = Client(
39 ssl=ssl, connection_timeout=5)
37 host,
38 username=username,
39 password=password,
40 ssl=ssl,
41 connection_timeout=5,
42 )
40 client.execute_ps("Write-Host 'Hello, World!'")
43 client.execute_ps("Write-Host 'Hello, World!'")
41 return client
44 return client
42 except requests.exceptions.ConnectionError:
45 except requests.exceptions.ConnectionError:
43 if time.time() >= end_time:
46 if time.time() >= end_time:
44 raise
47 raise
45
48
46 time.sleep(1)
49 time.sleep(1)
47
50
48
51
49 def format_object(o):
52 def format_object(o):
50 if isinstance(o, str):
53 if isinstance(o, str):
51 return o
54 return o
52
55
53 try:
56 try:
54 o = str(o)
57 o = str(o)
55 except TypeError:
58 except (AttributeError, TypeError):
56 o = pprint.pformat(o.extended_properties)
59 o = pprint.pformat(o.extended_properties)
57
60
58 return o
61 return o
59
62
60
63
61 def run_powershell(client, script):
64 def run_powershell(client, script):
62 with RunspacePool(client.wsman) as pool:
65 with RunspacePool(client.wsman) as pool:
63 ps = PowerShell(pool)
66 ps = PowerShell(pool)
64 ps.add_script(script)
67 ps.add_script(script)
65
68
66 ps.begin_invoke()
69 ps.begin_invoke()
67
70
68 while ps.state == PSInvocationState.RUNNING:
71 while ps.state == PSInvocationState.RUNNING:
69 ps.poll_invoke()
72 ps.poll_invoke()
70 for o in ps.output:
73 for o in ps.output:
71 print(format_object(o))
74 print(format_object(o))
72
75
73 ps.output[:] = []
76 ps.output[:] = []
74
77
75 ps.end_invoke()
78 ps.end_invoke()
76
79
77 for o in ps.output:
80 for o in ps.output:
78 print(format_object(o))
81 print(format_object(o))
79
82
80 if ps.state == PSInvocationState.FAILED:
83 if ps.state == PSInvocationState.FAILED:
81 raise Exception('PowerShell execution failed: %s' %
84 raise Exception(
82 ' '.join(map(format_object, ps.streams.error)))
85 'PowerShell execution failed: %s'
86 % ' '.join(map(format_object, ps.streams.error))
87 )
@@ -1,130 +1,121 b''
1 #
1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
6 #
6 #
7 astroid==1.6.6 \
7 astroid==1.6.6 \
8 --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
8 --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
9 --hash=sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7 \
9 --hash=sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7 \
10 # via pylint
10 # via pylint
11 backports.functools-lru-cache==1.5 \
11 backports.functools-lru-cache==1.5 \
12 --hash=sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a \
12 --hash=sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a \
13 --hash=sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd \
13 --hash=sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd \
14 # via astroid, isort, pylint
14 # via astroid, isort, pylint
15 bzr==2.7.0 ; python_version <= "2.7" and platform_python_implementation == "CPython" \
15 bzr==2.7.0 ; python_version <= "2.7" and platform_python_implementation == "CPython" \
16 --hash=sha256:c9f6bbe0a50201dadc5fddadd94ba50174193c6cf6e39e16f6dd0ad98a1df338
16 --hash=sha256:c9f6bbe0a50201dadc5fddadd94ba50174193c6cf6e39e16f6dd0ad98a1df338
17 configparser==3.7.4 \
17 configparser==3.7.4 \
18 --hash=sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32 \
18 --hash=sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32 \
19 --hash=sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75 \
19 --hash=sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75 \
20 # via pylint
20 # via pylint
21 contextlib2==0.5.5 \
21 contextlib2==0.5.5 \
22 --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \
22 --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \
23 --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \
23 --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \
24 # via vcrpy
24 # via vcrpy
25 docutils==0.14 \
25 docutils==0.15.2 \
26 --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
26 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
27 --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
27 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
28 --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
28 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
29 enum34==1.1.6 \
29 enum34==1.1.6 \
30 --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
30 --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
31 --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
31 --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
32 --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
32 --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
33 --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
33 --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
34 # via astroid
34 # via astroid
35 funcsigs==1.0.2 \
35 funcsigs==1.0.2 \
36 --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
36 --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
37 --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
37 --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
38 # via mock
38 # via mock
39 futures==3.2.0 \
39 futures==3.3.0 \
40 --hash=sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265 \
40 --hash=sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16 \
41 --hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1 \
41 --hash=sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794 \
42 # via isort
42 # via isort
43 fuzzywuzzy==0.17.0 \
43 fuzzywuzzy==0.17.0 \
44 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
44 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
45 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
45 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
46 isort==4.3.17 \
46 isort==4.3.21 \
47 --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
47 --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \
48 --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
48 --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \
49 # via pylint
49 # via pylint
50 lazy-object-proxy==1.3.1 \
50 lazy-object-proxy==1.4.1 \
51 --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
51 --hash=sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661 \
52 --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
52 --hash=sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f \
53 --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
53 --hash=sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13 \
54 --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
54 --hash=sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821 \
55 --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
55 --hash=sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71 \
56 --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
56 --hash=sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e \
57 --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
57 --hash=sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea \
58 --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
58 --hash=sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229 \
59 --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
59 --hash=sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4 \
60 --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
60 --hash=sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e \
61 --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
61 --hash=sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20 \
62 --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
62 --hash=sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16 \
63 --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
63 --hash=sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b \
64 --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
64 --hash=sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7 \
65 --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
65 --hash=sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c \
66 --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
66 --hash=sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a \
67 --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
67 --hash=sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e \
68 --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
68 --hash=sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1 \
69 --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
70 --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
71 --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
72 --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
73 --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
74 --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
75 --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
76 --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
77 --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
78 --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
79 --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
80 # via astroid
69 # via astroid
81 mccabe==0.6.1 \
70 mccabe==0.6.1 \
82 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
71 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
83 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
72 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
84 # via pylint
73 # via pylint
85 mock==2.0.0 \
74 mock==3.0.5 \
86 --hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \
75 --hash=sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3 \
87 --hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba \
76 --hash=sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8 \
88 # via vcrpy
77 # via vcrpy
89 pbr==5.1.3 \
90 --hash=sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843 \
91 --hash=sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824 \
92 # via mock
93 pyflakes==2.1.1 \
78 pyflakes==2.1.1 \
94 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
79 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
95 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
80 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
96 pygments==2.3.1 \
81 pygments==2.4.2 \
97 --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
82 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
98 --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
83 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
99 pylint==1.9.4 \
84 pylint==1.9.5 \
100 --hash=sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d \
85 --hash=sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42 \
101 --hash=sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93
86 --hash=sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300
102 python-levenshtein==0.12.0 \
87 python-levenshtein==0.12.0 \
103 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
88 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
104 pyyaml==5.1 \
89 pyyaml==5.1.2 \
105 --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
90 --hash=sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9 \
106 --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
91 --hash=sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4 \
107 --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
92 --hash=sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8 \
108 --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
93 --hash=sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696 \
109 --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
94 --hash=sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34 \
110 --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
95 --hash=sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9 \
111 --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
96 --hash=sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73 \
112 --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
97 --hash=sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299 \
113 --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
98 --hash=sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b \
114 --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
99 --hash=sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae \
115 --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
100 --hash=sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681 \
101 --hash=sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41 \
102 --hash=sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8 \
116 # via vcrpy
103 # via vcrpy
117 singledispatch==3.4.0.3 \
104 singledispatch==3.4.0.3 \
118 --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
105 --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
119 --hash=sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8 \
106 --hash=sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8 \
120 # via astroid, pylint
107 # via astroid, pylint
121 six==1.12.0 \
108 six==1.12.0 \
122 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
109 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
123 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
110 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
124 # via astroid, mock, pylint, singledispatch, vcrpy
111 # via astroid, mock, pylint, singledispatch, vcrpy
125 vcrpy==2.0.1 \
112 vcrpy==2.0.1 \
126 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
113 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
127 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
114 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
128 wrapt==1.11.1 \
115 wrapt==1.11.2 \
129 --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
116 --hash=sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1 \
130 # via astroid, vcrpy
117 # via astroid, vcrpy
118
119 # WARNING: The following packages were not pinned, but pip requires them to be
120 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
121 # setuptools==41.0.1 # via python-levenshtein
@@ -1,159 +1,149 b''
1 #
1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in
6 #
6 #
7 astroid==2.2.5 \
7 astroid==2.2.5 \
8 --hash=sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4 \
8 --hash=sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4 \
9 --hash=sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4 \
9 --hash=sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4 \
10 # via pylint
10 # via pylint
11 docutils==0.14 \
11 docutils==0.15.2 \
12 --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
12 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
13 --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
13 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
14 --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
14 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
15 fuzzywuzzy==0.17.0 \
15 fuzzywuzzy==0.17.0 \
16 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
16 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
17 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
17 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
18 idna==2.8 \
18 idna==2.8 \
19 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
19 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
20 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
20 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
21 # via yarl
21 # via yarl
22 isort==4.3.17 \
22 isort==4.3.21 \
23 --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
23 --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \
24 --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
24 --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \
25 # via pylint
25 # via pylint
26 lazy-object-proxy==1.3.1 \
26 lazy-object-proxy==1.4.1 \
27 --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
27 --hash=sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661 \
28 --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
28 --hash=sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f \
29 --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
29 --hash=sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13 \
30 --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
30 --hash=sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821 \
31 --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
31 --hash=sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71 \
32 --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
32 --hash=sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e \
33 --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
33 --hash=sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea \
34 --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
34 --hash=sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229 \
35 --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
35 --hash=sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4 \
36 --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
36 --hash=sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e \
37 --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
37 --hash=sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20 \
38 --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
38 --hash=sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16 \
39 --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
39 --hash=sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b \
40 --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
40 --hash=sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7 \
41 --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
41 --hash=sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c \
42 --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
42 --hash=sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a \
43 --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
43 --hash=sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e \
44 --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
44 --hash=sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1 \
45 --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
46 --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
47 --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
48 --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
49 --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
50 --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
51 --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
52 --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
53 --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
54 --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
55 --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
56 # via astroid
45 # via astroid
57 mccabe==0.6.1 \
46 mccabe==0.6.1 \
58 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
47 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
59 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
48 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
60 # via pylint
49 # via pylint
61 multidict==4.5.2 \
50 multidict==4.5.2 \
62 --hash=sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f \
51 --hash=sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f \
63 --hash=sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3 \
52 --hash=sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3 \
64 --hash=sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef \
53 --hash=sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef \
65 --hash=sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b \
54 --hash=sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b \
66 --hash=sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73 \
55 --hash=sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73 \
67 --hash=sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc \
56 --hash=sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc \
68 --hash=sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3 \
57 --hash=sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3 \
69 --hash=sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd \
58 --hash=sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd \
70 --hash=sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351 \
59 --hash=sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351 \
71 --hash=sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941 \
60 --hash=sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941 \
72 --hash=sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d \
61 --hash=sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d \
73 --hash=sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1 \
62 --hash=sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1 \
74 --hash=sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b \
63 --hash=sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b \
75 --hash=sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a \
64 --hash=sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a \
76 --hash=sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3 \
65 --hash=sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3 \
77 --hash=sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7 \
66 --hash=sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7 \
78 --hash=sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0 \
67 --hash=sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0 \
79 --hash=sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0 \
68 --hash=sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0 \
80 --hash=sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014 \
69 --hash=sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014 \
81 --hash=sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5 \
70 --hash=sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5 \
82 --hash=sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036 \
71 --hash=sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036 \
83 --hash=sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d \
72 --hash=sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d \
84 --hash=sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a \
73 --hash=sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a \
85 --hash=sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce \
74 --hash=sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce \
86 --hash=sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1 \
75 --hash=sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1 \
87 --hash=sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a \
76 --hash=sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a \
88 --hash=sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9 \
77 --hash=sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9 \
89 --hash=sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7 \
78 --hash=sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7 \
90 --hash=sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b \
79 --hash=sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b \
91 # via yarl
80 # via yarl
92 pyflakes==2.1.1 \
81 pyflakes==2.1.1 \
93 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
82 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
94 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
83 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
95 pygments==2.3.1 \
84 pygments==2.4.2 \
96 --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
85 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
97 --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
86 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
98 pylint==2.3.1 \
87 pylint==2.3.1 \
99 --hash=sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09 \
88 --hash=sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09 \
100 --hash=sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1
89 --hash=sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1
101 python-levenshtein==0.12.0 \
90 python-levenshtein==0.12.0 \
102 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
91 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
103 pyyaml==5.1 \
92 pyyaml==5.1.2 \
104 --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
93 --hash=sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9 \
105 --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
94 --hash=sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4 \
106 --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
95 --hash=sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8 \
107 --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
96 --hash=sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696 \
108 --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
97 --hash=sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34 \
109 --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
98 --hash=sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9 \
110 --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
99 --hash=sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73 \
111 --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
100 --hash=sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299 \
112 --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
101 --hash=sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b \
113 --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
102 --hash=sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae \
114 --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
103 --hash=sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681 \
104 --hash=sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41 \
105 --hash=sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8 \
115 # via vcrpy
106 # via vcrpy
116 six==1.12.0 \
107 six==1.12.0 \
117 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
108 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
118 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
109 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
119 # via astroid, vcrpy
110 # via astroid, vcrpy
120 typed-ast==1.3.4 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
111 typed-ast==1.4.0 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
121 --hash=sha256:04894d268ba6eab7e093d43107869ad49e7b5ef40d1a94243ea49b352061b200 \
112 --hash=sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e \
122 --hash=sha256:16616ece19daddc586e499a3d2f560302c11f122b9c692bc216e821ae32aa0d0 \
113 --hash=sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e \
123 --hash=sha256:252fdae740964b2d3cdfb3f84dcb4d6247a48a6abe2579e8029ab3be3cdc026c \
114 --hash=sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0 \
124 --hash=sha256:2af80a373af123d0b9f44941a46df67ef0ff7a60f95872412a145f4500a7fc99 \
115 --hash=sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c \
125 --hash=sha256:2c88d0a913229a06282b285f42a31e063c3bf9071ff65c5ea4c12acb6977c6a7 \
116 --hash=sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631 \
126 --hash=sha256:2ea99c029ebd4b5a308d915cc7fb95b8e1201d60b065450d5d26deb65d3f2bc1 \
117 --hash=sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4 \
127 --hash=sha256:3d2e3ab175fc097d2a51c7a0d3fda442f35ebcc93bb1d7bd9b95ad893e44c04d \
118 --hash=sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34 \
128 --hash=sha256:4766dd695548a15ee766927bf883fb90c6ac8321be5a60c141f18628fb7f8da8 \
119 --hash=sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b \
129 --hash=sha256:56b6978798502ef66625a2e0f80cf923da64e328da8bbe16c1ff928c70c873de \
120 --hash=sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a \
130 --hash=sha256:5cddb6f8bce14325b2863f9d5ac5c51e07b71b462361fd815d1d7706d3a9d682 \
121 --hash=sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233 \
131 --hash=sha256:644ee788222d81555af543b70a1098f2025db38eaa99226f3a75a6854924d4db \
122 --hash=sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1 \
132 --hash=sha256:64cf762049fc4775efe6b27161467e76d0ba145862802a65eefc8879086fc6f8 \
123 --hash=sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36 \
133 --hash=sha256:68c362848d9fb71d3c3e5f43c09974a0ae319144634e7a47db62f0f2a54a7fa7 \
124 --hash=sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d \
134 --hash=sha256:6c1f3c6f6635e611d58e467bf4371883568f0de9ccc4606f17048142dec14a1f \
125 --hash=sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a \
135 --hash=sha256:b213d4a02eec4ddf622f4d2fbc539f062af3788d1f332f028a2e19c42da53f15 \
126 --hash=sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12
136 --hash=sha256:bb27d4e7805a7de0e35bd0cb1411bc85f807968b2b0539597a49a23b00a622ae \
137 --hash=sha256:c9d414512eaa417aadae7758bc118868cd2396b0e6138c1dd4fda96679c079d3 \
138 --hash=sha256:f0937165d1e25477b01081c4763d2d9cdc3b18af69cb259dd4f640c9b900fe5e \
139 --hash=sha256:fb96a6e2c11059ecf84e6741a319f93f683e440e341d4489c9b161eca251cf2a \
140 --hash=sha256:fc71d2d6ae56a091a8d94f33ec9d0f2001d1cb1db423d8b4355debfe9ce689b7
141 vcrpy==2.0.1 \
127 vcrpy==2.0.1 \
142 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
128 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
143 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
129 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
144 wrapt==1.11.1 \
130 wrapt==1.11.2 \
145 --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
131 --hash=sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1 \
146 # via astroid, vcrpy
132 # via astroid, vcrpy
147 yarl==1.3.0 \
133 yarl==1.3.0 \
148 --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
134 --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
149 --hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
135 --hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
150 --hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
136 --hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
151 --hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
137 --hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
152 --hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
138 --hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
153 --hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
139 --hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
154 --hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
140 --hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
155 --hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
141 --hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
156 --hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
142 --hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
157 --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
143 --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
158 --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 \
144 --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 \
159 # via vcrpy
145 # via vcrpy
146
147 # WARNING: The following packages were not pinned, but pip requires them to be
148 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
149 # setuptools==41.0.1 # via python-levenshtein
@@ -1,168 +1,193 b''
1 #
1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile -U --generate-hashes --output-file contrib/automation/requirements.txt contrib/automation/requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/automation/requirements.txt contrib/automation/requirements.txt.in
6 #
6 #
7 asn1crypto==0.24.0 \
7 asn1crypto==1.0.1 \
8 --hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
8 --hash=sha256:0b199f211ae690df3db4fd6c1c4ff976497fb1da689193e368eedbadc53d9292 \
9 --hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \
9 --hash=sha256:bca90060bd995c3f62c4433168eab407e44bdbdb567b3f3a396a676c1a4c4a3f \
10 # via cryptography
10 # via cryptography
11 bcrypt==3.1.6 \
11 bcrypt==3.1.7 \
12 --hash=sha256:0ba875eb67b011add6d8c5b76afbd92166e98b1f1efab9433d5dc0fafc76e203 \
12 --hash=sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89 \
13 --hash=sha256:21ed446054c93e209434148ef0b362432bb82bbdaf7beef70a32c221f3e33d1c \
13 --hash=sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42 \
14 --hash=sha256:28a0459381a8021f57230954b9e9a65bb5e3d569d2c253c5cac6cb181d71cf23 \
14 --hash=sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294 \
15 --hash=sha256:2aed3091eb6f51c26b7c2fad08d6620d1c35839e7a362f706015b41bd991125e \
15 --hash=sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161 \
16 --hash=sha256:2fa5d1e438958ea90eaedbf8082c2ceb1a684b4f6c75a3800c6ec1e18ebef96f \
16 --hash=sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31 \
17 --hash=sha256:3a73f45484e9874252002793518da060fb11eaa76c30713faa12115db17d1430 \
17 --hash=sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5 \
18 --hash=sha256:3e489787638a36bb466cd66780e15715494b6d6905ffdbaede94440d6d8e7dba \
18 --hash=sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c \
19 --hash=sha256:44636759d222baa62806bbceb20e96f75a015a6381690d1bc2eda91c01ec02ea \
19 --hash=sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0 \
20 --hash=sha256:678c21b2fecaa72a1eded0cf12351b153615520637efcadc09ecf81b871f1596 \
20 --hash=sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de \
21 --hash=sha256:75460c2c3786977ea9768d6c9d8957ba31b5fbeb0aae67a5c0e96aab4155f18c \
21 --hash=sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e \
22 --hash=sha256:8ac06fb3e6aacb0a95b56eba735c0b64df49651c6ceb1ad1cf01ba75070d567f \
22 --hash=sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052 \
23 --hash=sha256:8fdced50a8b646fff8fa0e4b1c5fd940ecc844b43d1da5a980cb07f2d1b1132f \
23 --hash=sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09 \
24 --hash=sha256:9b2c5b640a2da533b0ab5f148d87fb9989bf9bcb2e61eea6a729102a6d36aef9 \
24 --hash=sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105 \
25 --hash=sha256:a9083e7fa9adb1a4de5ac15f9097eb15b04e2c8f97618f1b881af40abce382e1 \
25 --hash=sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133 \
26 --hash=sha256:b7e3948b8b1a81c5a99d41da5fb2dc03ddb93b5f96fcd3fd27e643f91efa33e1 \
26 --hash=sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7 \
27 --hash=sha256:b998b8ca979d906085f6a5d84f7b5459e5e94a13fc27c28a3514437013b6c2f6 \
27 --hash=sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc \
28 --hash=sha256:dd08c50bc6f7be69cd7ba0769acca28c846ec46b7a8ddc2acf4b9ac6f8a7457e \
29 --hash=sha256:de5badee458544ab8125e63e39afeedfcf3aef6a6e2282ac159c95ae7472d773 \
30 --hash=sha256:ede2a87333d24f55a4a7338a6ccdccf3eaa9bed081d1737e0db4dbd1a4f7e6b6 \
31 # via paramiko
28 # via paramiko
32 boto3==1.9.137 \
29 bleach==3.1.0 \
33 --hash=sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324 \
30 --hash=sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16 \
34 --hash=sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8
31 --hash=sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa \
35 botocore==1.12.137 \
32 # via readme-renderer
36 --hash=sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8 \
33 boto3==1.9.243 \
37 --hash=sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35 \
34 --hash=sha256:404acbecef8f4912f18312fcfaffe7eba7f10b3b7adf7853bdba59cdf2275ebb \
35 --hash=sha256:c6e5a7e4548ce7586c354ff633f2a66ba3c471d15a8ae6a30f873122ab04e1cf
36 botocore==1.12.243 \
37 --hash=sha256:397585a7881230274afb8d1877ef69a661b0a311745cd324f14a052fb2a2863a \
38 --hash=sha256:4496f8da89cb496462a831897ad248e13e431d9fa7e41e06d426fd6658ab6e59 \
38 # via boto3, s3transfer
39 # via boto3, s3transfer
39 certifi==2019.3.9 \
40 certifi==2019.9.11 \
40 --hash=sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5 \
41 --hash=sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50 \
41 --hash=sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae \
42 --hash=sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef \
42 # via requests
43 # via requests
43 cffi==1.12.3 \
44 cffi==1.12.3 \
44 --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
45 --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
45 --hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
46 --hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
46 --hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
47 --hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
47 --hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
48 --hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
48 --hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
49 --hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
49 --hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
50 --hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
50 --hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
51 --hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
51 --hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
52 --hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
52 --hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
53 --hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
53 --hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
54 --hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
54 --hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
55 --hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
55 --hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
56 --hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
56 --hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
57 --hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
57 --hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
58 --hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
58 --hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
59 --hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
59 --hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
60 --hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
60 --hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
61 --hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
61 --hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
62 --hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
62 --hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
63 --hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
63 --hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
64 --hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
64 --hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
65 --hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
65 --hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
66 --hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
66 --hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
67 --hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
67 --hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
68 --hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
68 --hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
69 --hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
69 --hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
70 --hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
70 --hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
71 --hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
71 --hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201 \
72 --hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201 \
72 # via bcrypt, cryptography, pynacl
73 # via bcrypt, cryptography, pynacl
73 chardet==3.0.4 \
74 chardet==3.0.4 \
74 --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
75 --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
75 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
76 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
76 # via requests
77 # via requests
77 cryptography==2.6.1 \
78 cryptography==2.7 \
78 --hash=sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1 \
79 --hash=sha256:24b61e5fcb506424d3ec4e18bca995833839bf13c59fc43e530e488f28d46b8c \
79 --hash=sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705 \
80 --hash=sha256:25dd1581a183e9e7a806fe0543f485103232f940fcfc301db65e630512cce643 \
80 --hash=sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6 \
81 --hash=sha256:3452bba7c21c69f2df772762be0066c7ed5dc65df494a1d53a58b683a83e1216 \
81 --hash=sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1 \
82 --hash=sha256:41a0be220dd1ed9e998f5891948306eb8c812b512dc398e5a01846d855050799 \
82 --hash=sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8 \
83 --hash=sha256:5751d8a11b956fbfa314f6553d186b94aa70fdb03d8a4d4f1c82dcacf0cbe28a \
83 --hash=sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151 \
84 --hash=sha256:5f61c7d749048fa6e3322258b4263463bfccefecb0dd731b6561cb617a1d9bb9 \
84 --hash=sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d \
85 --hash=sha256:72e24c521fa2106f19623a3851e9f89ddfdeb9ac63871c7643790f872a305dfc \
85 --hash=sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659 \
86 --hash=sha256:7b97ae6ef5cba2e3bb14256625423413d5ce8d1abb91d4f29b6d1a081da765f8 \
86 --hash=sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537 \
87 --hash=sha256:961e886d8a3590fd2c723cf07be14e2a91cf53c25f02435c04d39e90780e3b53 \
87 --hash=sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e \
88 --hash=sha256:96d8473848e984184b6728e2c9d391482008646276c3ff084a1bd89e15ff53a1 \
88 --hash=sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb \
89 --hash=sha256:ae536da50c7ad1e002c3eee101871d93abdc90d9c5f651818450a0d3af718609 \
89 --hash=sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c \
90 --hash=sha256:b0db0cecf396033abb4a93c95d1602f268b3a68bb0a9cc06a7cff587bb9a7292 \
90 --hash=sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9 \
91 --hash=sha256:cfee9164954c186b191b91d4193989ca994703b2fff406f71cf454a2d3c7327e \
91 --hash=sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5 \
92 --hash=sha256:e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6 \
92 --hash=sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad \
93 --hash=sha256:f27d93f0139a3c056172ebb5d4f9056e770fdf0206c2f422ff2ebbad142e09ed \
93 --hash=sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a \
94 --hash=sha256:f57b76e46a58b63d1c6375017f4564a28f19a5ca912691fd2e4261b3414b618d \
94 --hash=sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460 \
95 --hash=sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd \
96 --hash=sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6 \
97 # via paramiko, pypsrp
95 # via paramiko, pypsrp
98 docutils==0.14 \
96 docutils==0.15.2 \
99 --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
97 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
100 --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
98 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
101 --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \
99 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 \
102 # via botocore
100 # via botocore, readme-renderer
103 idna==2.8 \
101 idna==2.8 \
104 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
102 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
105 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
103 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
106 # via requests
104 # via requests
107 jmespath==0.9.4 \
105 jmespath==0.9.4 \
108 --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
106 --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
109 --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
107 --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
110 # via boto3, botocore
108 # via boto3, botocore
111 ntlm-auth==1.3.0 \
109 ntlm-auth==1.4.0 \
112 --hash=sha256:bb2fd03c665f0f62c5f65695b62dcdb07fb7a45df6ebc86c770be2054d6902dd \
110 --hash=sha256:11f7a3cec38155b7cecdd9bbc8c37cd738d8012f0523b3f98d8caefe394feb97 \
113 --hash=sha256:ce5b4483ed761f341a538a426a71a52e5a9cf5fd834ebef1d2090f9eef14b3f8 \
111 --hash=sha256:350f2389c8ee5517f47db55a36ac2f8efc9742a60a678d6e2caa92385bdcaa9a \
114 # via pypsrp
112 # via pypsrp
115 paramiko==2.4.2 \
113 paramiko==2.6.0 \
116 --hash=sha256:3c16b2bfb4c0d810b24c40155dbfd113c0521e7e6ee593d704e84b4c658a1f3b \
114 --hash=sha256:99f0179bdc176281d21961a003ffdb2ec369daac1a1007241f53374e376576cf \
117 --hash=sha256:a8975a7df3560c9f1e2b43dc54ebd40fd00a7017392ca5445ce7df409f900fcb
115 --hash=sha256:f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041
118 pyasn1==0.4.5 \
116 pkginfo==1.5.0.1 \
119 --hash=sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7 \
117 --hash=sha256:7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb \
120 --hash=sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e \
118 --hash=sha256:a6d9e40ca61ad3ebd0b72fbadd4fba16e4c0e4df0428c041e01e06eb6ee71f32 \
121 # via paramiko
119 # via twine
122 pycparser==2.19 \
120 pycparser==2.19 \
123 --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
121 --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
124 # via cffi
122 # via cffi
123 pygments==2.4.2 \
124 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
125 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297 \
126 # via readme-renderer
125 pynacl==1.3.0 \
127 pynacl==1.3.0 \
126 --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
128 --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
127 --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
129 --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
128 --hash=sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e \
130 --hash=sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e \
129 --hash=sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae \
131 --hash=sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae \
130 --hash=sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621 \
132 --hash=sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621 \
131 --hash=sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56 \
133 --hash=sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56 \
132 --hash=sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39 \
134 --hash=sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39 \
133 --hash=sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310 \
135 --hash=sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310 \
134 --hash=sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1 \
136 --hash=sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1 \
135 --hash=sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a \
137 --hash=sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a \
136 --hash=sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786 \
138 --hash=sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786 \
137 --hash=sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b \
139 --hash=sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b \
138 --hash=sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b \
140 --hash=sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b \
139 --hash=sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f \
141 --hash=sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f \
140 --hash=sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20 \
142 --hash=sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20 \
141 --hash=sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415 \
143 --hash=sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415 \
142 --hash=sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715 \
144 --hash=sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715 \
143 --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
145 --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
144 --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
146 --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
145 # via paramiko
147 # via paramiko
146 pypsrp==0.3.1 \
148 pypsrp==0.4.0 \
147 --hash=sha256:309853380fe086090a03cc6662a778ee69b1cae355ae4a932859034fd76e9d0b \
149 --hash=sha256:64b5bdd725a9744c821483b05ecd266f6417f4c6e90ee961a08838480f7d025e \
148 --hash=sha256:90f946254f547dc3493cea8493c819ab87e152a755797c93aa2668678ba8ae85
150 --hash=sha256:f42919247fb80f7dc24c552560d7c24e754d15326030c9e3b7b94f51cfa4dc69
149 python-dateutil==2.8.0 \
151 python-dateutil==2.8.0 \
150 --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \
152 --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \
151 --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \
153 --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \
152 # via botocore
154 # via botocore
153 requests==2.21.0 \
155 readme-renderer==24.0 \
154 --hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
156 --hash=sha256:bb16f55b259f27f75f640acf5e00cf897845a8b3e4731b5c1a436e4b8529202f \
155 --hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b \
157 --hash=sha256:c8532b79afc0375a85f10433eca157d6b50f7d6990f337fa498c96cd4bfc203d \
156 # via pypsrp
158 # via twine
157 s3transfer==0.2.0 \
159 requests-toolbelt==0.9.1 \
158 --hash=sha256:7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e \
160 --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
159 --hash=sha256:f23d5cb7d862b104401d9021fc82e5fa0e0cf57b7660a1331425aab0c691d021 \
161 --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 \
162 # via twine
163 requests==2.22.0 \
164 --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
165 --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \
166 # via pypsrp, requests-toolbelt, twine
167 s3transfer==0.2.1 \
168 --hash=sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d \
169 --hash=sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba \
160 # via boto3
170 # via boto3
161 six==1.12.0 \
171 six==1.12.0 \
162 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
172 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
163 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
173 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
164 # via bcrypt, cryptography, pynacl, pypsrp, python-dateutil
174 # via bcrypt, bleach, cryptography, pynacl, pypsrp, python-dateutil, readme-renderer
165 urllib3==1.24.2 \
175 tqdm==4.36.1 \
166 --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
176 --hash=sha256:abc25d0ce2397d070ef07d8c7e706aede7920da163c64997585d42d3537ece3d \
167 --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
177 --hash=sha256:dd3fcca8488bb1d416aa7469d2f277902f26260c45aa86b667b074cd44b3b115 \
178 # via twine
179 twine==2.0.0 \
180 --hash=sha256:5319dd3e02ac73fcddcd94f035b9631589ab5d23e1f4699d57365199d85261e1 \
181 --hash=sha256:9fe7091715c7576df166df8ef6654e61bada39571783f2fd415bdcba867c6993
182 urllib3==1.25.6 \
183 --hash=sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398 \
184 --hash=sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86 \
168 # via botocore, requests
185 # via botocore, requests
186 webencodings==0.5.1 \
187 --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
188 --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 \
189 # via bleach
190
191 # WARNING: The following packages were not pinned, but pip requires them to be
192 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
193 # setuptools==41.2.0 # via twine
@@ -1,3 +1,4 b''
1 boto3
1 boto3
2 paramiko
2 paramiko
3 pypsrp
3 pypsrp
4 twine
@@ -1,99 +1,108 b''
1 # Randomized torture test generation for bdiff
1 # Randomized torture test generation for bdiff
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4 import random
4 import random
5 import sys
5 import sys
6
6
7 from mercurial import (
7 from mercurial import (
8 mdiff,
8 mdiff,
9 pycompat,
9 )
10 )
10
11
12
11 def reducetest(a, b):
13 def reducetest(a, b):
12 tries = 0
14 tries = 0
13 reductions = 0
15 reductions = 0
14 print("reducing...")
16 print("reducing...")
15 while tries < 1000:
17 while tries < 1000:
16 a2 = "\n".join(l for l in a.splitlines()
18 a2 = (
17 if random.randint(0, 100) > 0) + "\n"
19 "\n".join(l for l in a.splitlines() if random.randint(0, 100) > 0)
18 b2 = "\n".join(l for l in b.splitlines()
20 + "\n"
19 if random.randint(0, 100) > 0) + "\n"
21 )
22 b2 = (
23 "\n".join(l for l in b.splitlines() if random.randint(0, 100) > 0)
24 + "\n"
25 )
20 if a2 == a and b2 == b:
26 if a2 == a and b2 == b:
21 continue
27 continue
22 if a2 == b2:
28 if a2 == b2:
23 continue
29 continue
24 tries += 1
30 tries += 1
25
31
26 try:
32 try:
27 test1(a, b)
33 test1(a, b)
28 except Exception:
34 except Exception:
29 reductions += 1
35 reductions += 1
30 tries = 0
36 tries = 0
31 a = a2
37 a = a2
32 b = b2
38 b = b2
33
39
34 print("reduced:", reductions, len(a) + len(b),
40 print("reduced:", reductions, len(a) + len(b), repr(a), repr(b))
35 repr(a), repr(b))
36 try:
41 try:
37 test1(a, b)
42 test1(a, b)
38 except Exception as inst:
43 except Exception as inst:
39 print("failed:", inst)
44 print("failed:", inst)
40
45
41 sys.exit(0)
46 sys.exit(0)
42
47
48
43 def test1(a, b):
49 def test1(a, b):
44 d = mdiff.textdiff(a, b)
50 d = mdiff.textdiff(a, b)
45 if not d:
51 if not d:
46 raise ValueError("empty")
52 raise ValueError("empty")
47 c = mdiff.patches(a, [d])
53 c = mdiff.patches(a, [d])
48 if c != b:
54 if c != b:
49 raise ValueError("bad")
55 raise ValueError("bad")
50
56
57
51 def testwrap(a, b):
58 def testwrap(a, b):
52 try:
59 try:
53 test1(a, b)
60 test1(a, b)
54 return
61 return
55 except Exception as inst:
62 except Exception as inst:
56 pass
63 print("exception:", inst)
57 print("exception:", inst)
58 reducetest(a, b)
64 reducetest(a, b)
59
65
66
60 def test(a, b):
67 def test(a, b):
61 testwrap(a, b)
68 testwrap(a, b)
62 testwrap(b, a)
69 testwrap(b, a)
63
70
71
64 def rndtest(size, noise):
72 def rndtest(size, noise):
65 a = []
73 a = []
66 src = " aaaaaaaabbbbccd"
74 src = " aaaaaaaabbbbccd"
67 for x in xrange(size):
75 for x in pycompat.xrange(size):
68 a.append(src[random.randint(0, len(src) - 1)])
76 a.append(src[random.randint(0, len(src) - 1)])
69
77
70 while True:
78 while True:
71 b = [c for c in a if random.randint(0, 99) > noise]
79 b = [c for c in a if random.randint(0, 99) > noise]
72 b2 = []
80 b2 = []
73 for c in b:
81 for c in b:
74 b2.append(c)
82 b2.append(c)
75 while random.randint(0, 99) < noise:
83 while random.randint(0, 99) < noise:
76 b2.append(src[random.randint(0, len(src) - 1)])
84 b2.append(src[random.randint(0, len(src) - 1)])
77 if b2 != a:
85 if b2 != a:
78 break
86 break
79
87
80 a = "\n".join(a) + "\n"
88 a = "\n".join(a) + "\n"
81 b = "\n".join(b2) + "\n"
89 b = "\n".join(b2) + "\n"
82
90
83 test(a, b)
91 test(a, b)
84
92
93
85 maxvol = 10000
94 maxvol = 10000
86 startsize = 2
95 startsize = 2
87 while True:
96 while True:
88 size = startsize
97 size = startsize
89 count = 0
98 count = 0
90 while size < maxvol:
99 while size < maxvol:
91 print(size)
100 print(size)
92 volume = 0
101 volume = 0
93 while volume < maxvol:
102 while volume < maxvol:
94 rndtest(size, 2)
103 rndtest(size, 2)
95 volume += size
104 volume += size
96 count += 2
105 count += 2
97 size *= 2
106 size *= 2
98 maxvol *= 4
107 maxvol *= 4
99 startsize *= 4
108 startsize *= 4
@@ -1,113 +1,126 b''
1 # __init__.py - asv benchmark suite
1 # __init__.py - asv benchmark suite
2 #
2 #
3 # Copyright 2016 Logilab SA <contact@logilab.fr>
3 # Copyright 2016 Logilab SA <contact@logilab.fr>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # "historical portability" policy of contrib/benchmarks:
8 # "historical portability" policy of contrib/benchmarks:
9 #
9 #
10 # We have to make this code work correctly with current mercurial stable branch
10 # We have to make this code work correctly with current mercurial stable branch
11 # and if possible with reasonable cost with early Mercurial versions.
11 # and if possible with reasonable cost with early Mercurial versions.
12
12
13 '''ASV (https://asv.readthedocs.io) benchmark suite
13 '''ASV (https://asv.readthedocs.io) benchmark suite
14
14
15 Benchmark are parameterized against reference repositories found in the
15 Benchmark are parameterized against reference repositories found in the
16 directory pointed by the REPOS_DIR environment variable.
16 directory pointed by the REPOS_DIR environment variable.
17
17
18 Invocation example:
18 Invocation example:
19
19
20 $ export REPOS_DIR=~/hgperf/repos
20 $ export REPOS_DIR=~/hgperf/repos
21 # run suite on given revision
21 # run suite on given revision
22 $ asv --config contrib/asv.conf.json run REV
22 $ asv --config contrib/asv.conf.json run REV
23 # run suite on new changesets found in stable and default branch
23 # run suite on new changesets found in stable and default branch
24 $ asv --config contrib/asv.conf.json run NEW
24 $ asv --config contrib/asv.conf.json run NEW
25 # display a comparative result table of benchmark results between two given
25 # display a comparative result table of benchmark results between two given
26 # revisions
26 # revisions
27 $ asv --config contrib/asv.conf.json compare REV1 REV2
27 $ asv --config contrib/asv.conf.json compare REV1 REV2
28 # compute regression detection and generate ASV static website
28 # compute regression detection and generate ASV static website
29 $ asv --config contrib/asv.conf.json publish
29 $ asv --config contrib/asv.conf.json publish
30 # serve the static website
30 # serve the static website
31 $ asv --config contrib/asv.conf.json preview
31 $ asv --config contrib/asv.conf.json preview
32 '''
32 '''
33
33
34 from __future__ import absolute_import
34 from __future__ import absolute_import
35
35
36 import functools
36 import functools
37 import os
37 import os
38 import re
38 import re
39
39
40 from mercurial import (
40 from mercurial import (
41 extensions,
41 extensions,
42 hg,
42 hg,
43 ui as uimod,
43 ui as uimod,
44 util,
44 util,
45 )
45 )
46
46
47 basedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
47 basedir = os.path.abspath(
48 os.path.pardir, os.path.pardir))
48 os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
49 )
49 reposdir = os.environ['REPOS_DIR']
50 reposdir = os.environ['REPOS_DIR']
50 reposnames = [name for name in os.listdir(reposdir)
51 reposnames = [
51 if os.path.isdir(os.path.join(reposdir, name, ".hg"))]
52 name
53 for name in os.listdir(reposdir)
54 if os.path.isdir(os.path.join(reposdir, name, ".hg"))
55 ]
52 if not reposnames:
56 if not reposnames:
53 raise ValueError("No repositories found in $REPO_DIR")
57 raise ValueError("No repositories found in $REPO_DIR")
54 outputre = re.compile((r'! wall (\d+.\d+) comb \d+.\d+ user \d+.\d+ sys '
58 outputre = re.compile(
55 r'\d+.\d+ \(best of \d+\)'))
59 (
60 r'! wall (\d+.\d+) comb \d+.\d+ user \d+.\d+ sys '
61 r'\d+.\d+ \(best of \d+\)'
62 )
63 )
64
56
65
57 def runperfcommand(reponame, command, *args, **kwargs):
66 def runperfcommand(reponame, command, *args, **kwargs):
58 os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
67 os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
59 # for "historical portability"
68 # for "historical portability"
60 # ui.load() has been available since d83ca85
69 # ui.load() has been available since d83ca85
61 if util.safehasattr(uimod.ui, "load"):
70 if util.safehasattr(uimod.ui, "load"):
62 ui = uimod.ui.load()
71 ui = uimod.ui.load()
63 else:
72 else:
64 ui = uimod.ui()
73 ui = uimod.ui()
65 repo = hg.repository(ui, os.path.join(reposdir, reponame))
74 repo = hg.repository(ui, os.path.join(reposdir, reponame))
66 perfext = extensions.load(ui, 'perfext',
75 perfext = extensions.load(
67 os.path.join(basedir, 'contrib', 'perf.py'))
76 ui, 'perfext', os.path.join(basedir, 'contrib', 'perf.py')
77 )
68 cmd = getattr(perfext, command)
78 cmd = getattr(perfext, command)
69 ui.pushbuffer()
79 ui.pushbuffer()
70 cmd(ui, repo, *args, **kwargs)
80 cmd(ui, repo, *args, **kwargs)
71 output = ui.popbuffer()
81 output = ui.popbuffer()
72 match = outputre.search(output)
82 match = outputre.search(output)
73 if not match:
83 if not match:
74 raise ValueError("Invalid output {0}".format(output))
84 raise ValueError("Invalid output {0}".format(output))
75 return float(match.group(1))
85 return float(match.group(1))
76
86
87
77 def perfbench(repos=reposnames, name=None, params=None):
88 def perfbench(repos=reposnames, name=None, params=None):
78 """decorator to declare ASV benchmark based on contrib/perf.py extension
89 """decorator to declare ASV benchmark based on contrib/perf.py extension
79
90
80 An ASV benchmark is a python function with the given attributes:
91 An ASV benchmark is a python function with the given attributes:
81
92
82 __name__: should start with track_, time_ or mem_ to be collected by ASV
93 __name__: should start with track_, time_ or mem_ to be collected by ASV
83 params and param_name: parameter matrix to display multiple graphs on the
94 params and param_name: parameter matrix to display multiple graphs on the
84 same page.
95 same page.
85 pretty_name: If defined it's displayed in web-ui instead of __name__
96 pretty_name: If defined it's displayed in web-ui instead of __name__
86 (useful for revsets)
97 (useful for revsets)
87 the module name is prepended to the benchmark name and displayed as
98 the module name is prepended to the benchmark name and displayed as
88 "category" in webui.
99 "category" in webui.
89
100
90 Benchmarks are automatically parameterized with repositories found in the
101 Benchmarks are automatically parameterized with repositories found in the
91 REPOS_DIR environment variable.
102 REPOS_DIR environment variable.
92
103
93 `params` is the param matrix in the form of a list of tuple
104 `params` is the param matrix in the form of a list of tuple
94 (param_name, [value0, value1])
105 (param_name, [value0, value1])
95
106
96 For example [(x, [a, b]), (y, [c, d])] declare benchmarks for
107 For example [(x, [a, b]), (y, [c, d])] declare benchmarks for
97 (a, c), (a, d), (b, c) and (b, d).
108 (a, c), (a, d), (b, c) and (b, d).
98 """
109 """
99 params = list(params or [])
110 params = list(params or [])
100 params.insert(0, ("repo", repos))
111 params.insert(0, ("repo", repos))
101
112
102 def decorator(func):
113 def decorator(func):
103 @functools.wraps(func)
114 @functools.wraps(func)
104 def wrapped(repo, *args):
115 def wrapped(repo, *args):
105 def perf(command, *a, **kw):
116 def perf(command, *a, **kw):
106 return runperfcommand(repo, command, *a, **kw)
117 return runperfcommand(repo, command, *a, **kw)
118
107 return func(perf, *args)
119 return func(perf, *args)
108
120
109 wrapped.params = [p[1] for p in params]
121 wrapped.params = [p[1] for p in params]
110 wrapped.param_names = [p[0] for p in params]
122 wrapped.param_names = [p[0] for p in params]
111 wrapped.pretty_name = name
123 wrapped.pretty_name = name
112 return wrapped
124 return wrapped
125
113 return decorator
126 return decorator
@@ -1,26 +1,30 b''
1 # perf.py - asv benchmarks using contrib/perf.py extension
1 # perf.py - asv benchmarks using contrib/perf.py extension
2 #
2 #
3 # Copyright 2016 Logilab SA <contact@logilab.fr>
3 # Copyright 2016 Logilab SA <contact@logilab.fr>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from . import perfbench
10 from . import perfbench
11
11
12
12 @perfbench()
13 @perfbench()
13 def track_tags(perf):
14 def track_tags(perf):
14 return perf("perftags")
15 return perf("perftags")
15
16
17
16 @perfbench()
18 @perfbench()
17 def track_status(perf):
19 def track_status(perf):
18 return perf("perfstatus", unknown=False)
20 return perf("perfstatus", unknown=False)
19
21
22
20 @perfbench(params=[('rev', ['1000', '10000', 'tip'])])
23 @perfbench(params=[('rev', ['1000', '10000', 'tip'])])
21 def track_manifest(perf, rev):
24 def track_manifest(perf, rev):
22 return perf("perfmanifest", rev)
25 return perf("perfmanifest", rev)
23
26
27
24 @perfbench()
28 @perfbench()
25 def track_heads(perf):
29 def track_heads(perf):
26 return perf("perfheads")
30 return perf("perfheads")
@@ -1,53 +1,57 b''
1 # revset.py - asv revset benchmarks
1 # revset.py - asv revset benchmarks
2 #
2 #
3 # Copyright 2016 Logilab SA <contact@logilab.fr>
3 # Copyright 2016 Logilab SA <contact@logilab.fr>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''ASV revset benchmarks generated from contrib/base-revsets.txt
8 '''ASV revset benchmarks generated from contrib/base-revsets.txt
9
9
10 Each revset benchmark is parameterized with variants (first, last, sort, ...)
10 Each revset benchmark is parameterized with variants (first, last, sort, ...)
11 '''
11 '''
12
12
13 from __future__ import absolute_import
13 from __future__ import absolute_import
14
14
15 import os
15 import os
16 import string
16 import string
17 import sys
17 import sys
18
18
19 from . import basedir, perfbench
19 from . import basedir, perfbench
20
20
21
21 def createrevsetbenchmark(baseset, variants=None):
22 def createrevsetbenchmark(baseset, variants=None):
22 if variants is None:
23 if variants is None:
23 # Default variants
24 # Default variants
24 variants = ["plain", "first", "last", "sort", "sort+first",
25 variants = ["plain", "first", "last", "sort", "sort+first", "sort+last"]
25 "sort+last"]
26 fname = "track_" + "_".join(
26 fname = "track_" + "_".join("".join([
27 "".join(
27 c if c in string.digits + string.letters else " "
28 [c if c in string.digits + string.letters else " " for c in baseset]
28 for c in baseset
29 ).split()
29 ]).split())
30 )
30
31
31 def wrap(fname, baseset):
32 def wrap(fname, baseset):
32 @perfbench(name=baseset, params=[("variant", variants)])
33 @perfbench(name=baseset, params=[("variant", variants)])
33 def f(perf, variant):
34 def f(perf, variant):
34 revset = baseset
35 revset = baseset
35 if variant != "plain":
36 if variant != "plain":
36 for var in variant.split("+"):
37 for var in variant.split("+"):
37 revset = "%s(%s)" % (var, revset)
38 revset = "%s(%s)" % (var, revset)
38 return perf("perfrevset", revset)
39 return perf("perfrevset", revset)
40
39 f.__name__ = fname
41 f.__name__ = fname
40 return f
42 return f
43
41 return wrap(fname, baseset)
44 return wrap(fname, baseset)
42
45
46
43 def initializerevsetbenchmarks():
47 def initializerevsetbenchmarks():
44 mod = sys.modules[__name__]
48 mod = sys.modules[__name__]
45 with open(os.path.join(basedir, 'contrib', 'base-revsets.txt'),
49 with open(os.path.join(basedir, 'contrib', 'base-revsets.txt'), 'rb') as fh:
46 'rb') as fh:
47 for line in fh:
50 for line in fh:
48 baseset = line.strip()
51 baseset = line.strip()
49 if baseset and not baseset.startswith('#'):
52 if baseset and not baseset.startswith('#'):
50 func = createrevsetbenchmark(baseset)
53 func = createrevsetbenchmark(baseset)
51 setattr(mod, func.__name__, func)
54 setattr(mod, func.__name__, func)
52
55
56
53 initializerevsetbenchmarks()
57 initializerevsetbenchmarks()
@@ -1,233 +1,345 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # byteify-strings.py - transform string literals to be Python 3 safe
3 # byteify-strings.py - transform string literals to be Python 3 safe
4 #
4 #
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
10 from __future__ import absolute_import, print_function
11
11
12 import argparse
12 import argparse
13 import contextlib
13 import contextlib
14 import errno
14 import errno
15 import os
15 import os
16 import sys
16 import sys
17 import tempfile
17 import tempfile
18 import token
18 import token
19 import tokenize
19 import tokenize
20
20
21
21 def adjusttokenpos(t, ofs):
22 def adjusttokenpos(t, ofs):
22 """Adjust start/end column of the given token"""
23 """Adjust start/end column of the given token"""
23 return t._replace(start=(t.start[0], t.start[1] + ofs),
24 return t._replace(
24 end=(t.end[0], t.end[1] + ofs))
25 start=(t.start[0], t.start[1] + ofs), end=(t.end[0], t.end[1] + ofs)
26 )
27
25
28
26 def replacetokens(tokens, opts):
29 def replacetokens(tokens, opts):
27 """Transform a stream of tokens from raw to Python 3.
30 """Transform a stream of tokens from raw to Python 3.
28
31
29 Returns a generator of possibly rewritten tokens.
32 Returns a generator of possibly rewritten tokens.
30
33
31 The input token list may be mutated as part of processing. However,
34 The input token list may be mutated as part of processing. However,
32 its changes do not necessarily match the output token stream.
35 its changes do not necessarily match the output token stream.
33 """
36 """
34 sysstrtokens = set()
37 sysstrtokens = set()
35
38
36 # The following utility functions access the tokens list and i index of
39 # The following utility functions access the tokens list and i index of
37 # the for i, t enumerate(tokens) loop below
40 # the for i, t enumerate(tokens) loop below
38 def _isop(j, *o):
41 def _isop(j, *o):
39 """Assert that tokens[j] is an OP with one of the given values"""
42 """Assert that tokens[j] is an OP with one of the given values"""
40 try:
43 try:
41 return tokens[j].type == token.OP and tokens[j].string in o
44 return tokens[j].type == token.OP and tokens[j].string in o
42 except IndexError:
45 except IndexError:
43 return False
46 return False
44
47
45 def _findargnofcall(n):
48 def _findargnofcall(n):
46 """Find arg n of a call expression (start at 0)
49 """Find arg n of a call expression (start at 0)
47
50
48 Returns index of the first token of that argument, or None if
51 Returns index of the first token of that argument, or None if
49 there is not that many arguments.
52 there is not that many arguments.
50
53
51 Assumes that token[i + 1] is '('.
54 Assumes that token[i + 1] is '('.
52
55
53 """
56 """
54 nested = 0
57 nested = 0
55 for j in range(i + 2, len(tokens)):
58 for j in range(i + 2, len(tokens)):
56 if _isop(j, ')', ']', '}'):
59 if _isop(j, ')', ']', '}'):
57 # end of call, tuple, subscription or dict / set
60 # end of call, tuple, subscription or dict / set
58 nested -= 1
61 nested -= 1
59 if nested < 0:
62 if nested < 0:
60 return None
63 return None
61 elif n == 0:
64 elif n == 0:
62 # this is the starting position of arg
65 # this is the starting position of arg
63 return j
66 return j
64 elif _isop(j, '(', '[', '{'):
67 elif _isop(j, '(', '[', '{'):
65 nested += 1
68 nested += 1
66 elif _isop(j, ',') and nested == 0:
69 elif _isop(j, ',') and nested == 0:
67 n -= 1
70 n -= 1
68
71
69 return None
72 return None
70
73
71 def _ensuresysstr(j):
74 def _ensuresysstr(j):
72 """Make sure the token at j is a system string
75 """Make sure the token at j is a system string
73
76
74 Remember the given token so the string transformer won't add
77 Remember the given token so the string transformer won't add
75 the byte prefix.
78 the byte prefix.
76
79
77 Ignores tokens that are not strings. Assumes bounds checking has
80 Ignores tokens that are not strings. Assumes bounds checking has
78 already been done.
81 already been done.
79
82
80 """
83 """
81 st = tokens[j]
84 k = j
82 if st.type == token.STRING and st.string.startswith(("'", '"')):
85 currtoken = tokens[k]
83 sysstrtokens.add(st)
86 while currtoken.type in (token.STRING, token.NEWLINE, tokenize.NL):
87 k += 1
88 if currtoken.type == token.STRING and currtoken.string.startswith(
89 ("'", '"')
90 ):
91 sysstrtokens.add(currtoken)
92 try:
93 currtoken = tokens[k]
94 except IndexError:
95 break
96
97 def _isitemaccess(j):
98 """Assert the next tokens form an item access on `tokens[j]` and that
99 `tokens[j]` is a name.
100 """
101 try:
102 return (
103 tokens[j].type == token.NAME
104 and _isop(j + 1, '[')
105 and tokens[j + 2].type == token.STRING
106 and _isop(j + 3, ']')
107 )
108 except IndexError:
109 return False
110
111 def _ismethodcall(j, *methodnames):
112 """Assert the next tokens form a call to `methodname` with a string
113 as first argument on `tokens[j]` and that `tokens[j]` is a name.
114 """
115 try:
116 return (
117 tokens[j].type == token.NAME
118 and _isop(j + 1, '.')
119 and tokens[j + 2].type == token.NAME
120 and tokens[j + 2].string in methodnames
121 and _isop(j + 3, '(')
122 and tokens[j + 4].type == token.STRING
123 )
124 except IndexError:
125 return False
84
126
85 coldelta = 0 # column increment for new opening parens
127 coldelta = 0 # column increment for new opening parens
86 coloffset = -1 # column offset for the current line (-1: TBD)
128 coloffset = -1 # column offset for the current line (-1: TBD)
87 parens = [(0, 0, 0)] # stack of (line, end-column, column-offset)
129 parens = [(0, 0, 0, -1)] # stack of (line, end-column, column-offset, type)
130 ignorenextline = False # don't transform the next line
131 insideignoreblock = False # don't transform until turned off
88 for i, t in enumerate(tokens):
132 for i, t in enumerate(tokens):
89 # Compute the column offset for the current line, such that
133 # Compute the column offset for the current line, such that
90 # the current line will be aligned to the last opening paren
134 # the current line will be aligned to the last opening paren
91 # as before.
135 # as before.
92 if coloffset < 0:
136 if coloffset < 0:
93 if t.start[1] == parens[-1][1]:
137 lastparen = parens[-1]
94 coloffset = parens[-1][2]
138 if t.start[1] == lastparen[1]:
95 elif t.start[1] + 1 == parens[-1][1]:
139 coloffset = lastparen[2]
140 elif t.start[1] + 1 == lastparen[1] and lastparen[3] not in (
141 token.NEWLINE,
142 tokenize.NL,
143 ):
96 # fix misaligned indent of s/util.Abort/error.Abort/
144 # fix misaligned indent of s/util.Abort/error.Abort/
97 coloffset = parens[-1][2] + (parens[-1][1] - t.start[1])
145 coloffset = lastparen[2] + (lastparen[1] - t.start[1])
98 else:
146 else:
99 coloffset = 0
147 coloffset = 0
100
148
101 # Reset per-line attributes at EOL.
149 # Reset per-line attributes at EOL.
102 if t.type in (token.NEWLINE, tokenize.NL):
150 if t.type in (token.NEWLINE, tokenize.NL):
103 yield adjusttokenpos(t, coloffset)
151 yield adjusttokenpos(t, coloffset)
104 coldelta = 0
152 coldelta = 0
105 coloffset = -1
153 coloffset = -1
154 if not insideignoreblock:
155 ignorenextline = (
156 tokens[i - 1].type == token.COMMENT
157 and tokens[i - 1].string == "# no-py3-transform"
158 )
159 continue
160
161 if t.type == token.COMMENT:
162 if t.string == "# py3-transform: off":
163 insideignoreblock = True
164 if t.string == "# py3-transform: on":
165 insideignoreblock = False
166
167 if ignorenextline or insideignoreblock:
168 yield adjusttokenpos(t, coloffset)
106 continue
169 continue
107
170
108 # Remember the last paren position.
171 # Remember the last paren position.
109 if _isop(i, '(', '[', '{'):
172 if _isop(i, '(', '[', '{'):
110 parens.append(t.end + (coloffset + coldelta,))
173 parens.append(t.end + (coloffset + coldelta, tokens[i + 1].type))
111 elif _isop(i, ')', ']', '}'):
174 elif _isop(i, ')', ']', '}'):
112 parens.pop()
175 parens.pop()
113
176
114 # Convert most string literals to byte literals. String literals
177 # Convert most string literals to byte literals. String literals
115 # in Python 2 are bytes. String literals in Python 3 are unicode.
178 # in Python 2 are bytes. String literals in Python 3 are unicode.
116 # Most strings in Mercurial are bytes and unicode strings are rare.
179 # Most strings in Mercurial are bytes and unicode strings are rare.
117 # Rather than rewrite all string literals to use ``b''`` to indicate
180 # Rather than rewrite all string literals to use ``b''`` to indicate
118 # byte strings, we apply this token transformer to insert the ``b``
181 # byte strings, we apply this token transformer to insert the ``b``
119 # prefix nearly everywhere.
182 # prefix nearly everywhere.
120 if t.type == token.STRING and t not in sysstrtokens:
183 if t.type == token.STRING and t not in sysstrtokens:
121 s = t.string
184 s = t.string
122
185
123 # Preserve docstrings as string literals. This is inconsistent
186 # Preserve docstrings as string literals. This is inconsistent
124 # with regular unprefixed strings. However, the
187 # with regular unprefixed strings. However, the
125 # "from __future__" parsing (which allows a module docstring to
188 # "from __future__" parsing (which allows a module docstring to
126 # exist before it) doesn't properly handle the docstring if it
189 # exist before it) doesn't properly handle the docstring if it
127 # is b''' prefixed, leading to a SyntaxError. We leave all
190 # is b''' prefixed, leading to a SyntaxError. We leave all
128 # docstrings as unprefixed to avoid this. This means Mercurial
191 # docstrings as unprefixed to avoid this. This means Mercurial
129 # components touching docstrings need to handle unicode,
192 # components touching docstrings need to handle unicode,
130 # unfortunately.
193 # unfortunately.
131 if s[0:3] in ("'''", '"""'):
194 if s[0:3] in ("'''", '"""'):
132 yield adjusttokenpos(t, coloffset)
195 # If it's assigned to something, it's not a docstring
133 continue
196 if not _isop(i - 1, '='):
197 yield adjusttokenpos(t, coloffset)
198 continue
134
199
135 # If the first character isn't a quote, it is likely a string
200 # If the first character isn't a quote, it is likely a string
136 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
201 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
137 if s[0] not in ("'", '"'):
202 if s[0] not in ("'", '"'):
138 yield adjusttokenpos(t, coloffset)
203 yield adjusttokenpos(t, coloffset)
139 continue
204 continue
140
205
141 # String literal. Prefix to make a b'' string.
206 # String literal. Prefix to make a b'' string.
142 yield adjusttokenpos(t._replace(string='b%s' % t.string),
207 yield adjusttokenpos(t._replace(string='b%s' % t.string), coloffset)
143 coloffset)
144 coldelta += 1
208 coldelta += 1
145 continue
209 continue
146
210
147 # This looks like a function call.
211 # This looks like a function call.
148 if t.type == token.NAME and _isop(i + 1, '('):
212 if t.type == token.NAME and _isop(i + 1, '('):
149 fn = t.string
213 fn = t.string
150
214
151 # *attr() builtins don't accept byte strings to 2nd argument.
215 # *attr() builtins don't accept byte strings to 2nd argument.
152 if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
216 if fn in (
153 not _isop(i - 1, '.')):
217 'getattr',
218 'setattr',
219 'hasattr',
220 'safehasattr',
221 'wrapfunction',
222 'wrapclass',
223 'addattr',
224 ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
154 arg1idx = _findargnofcall(1)
225 arg1idx = _findargnofcall(1)
155 if arg1idx is not None:
226 if arg1idx is not None:
156 _ensuresysstr(arg1idx)
227 _ensuresysstr(arg1idx)
157
228
158 # .encode() and .decode() on str/bytes/unicode don't accept
229 # .encode() and .decode() on str/bytes/unicode don't accept
159 # byte strings on Python 3.
230 # byte strings on Python 3.
160 elif fn in ('encode', 'decode') and _isop(i - 1, '.'):
231 elif fn in ('encode', 'decode') and _isop(i - 1, '.'):
161 for argn in range(2):
232 for argn in range(2):
162 argidx = _findargnofcall(argn)
233 argidx = _findargnofcall(argn)
163 if argidx is not None:
234 if argidx is not None:
164 _ensuresysstr(argidx)
235 _ensuresysstr(argidx)
165
236
166 # It changes iteritems/values to items/values as they are not
237 # It changes iteritems/values to items/values as they are not
167 # present in Python 3 world.
238 # present in Python 3 world.
168 elif opts['dictiter'] and fn in ('iteritems', 'itervalues'):
239 elif opts['dictiter'] and fn in ('iteritems', 'itervalues'):
169 yield adjusttokenpos(t._replace(string=fn[4:]), coloffset)
240 yield adjusttokenpos(t._replace(string=fn[4:]), coloffset)
170 continue
241 continue
171
242
243 if t.type == token.NAME and t.string in opts['treat-as-kwargs']:
244 if _isitemaccess(i):
245 _ensuresysstr(i + 2)
246 if _ismethodcall(i, 'get', 'pop', 'setdefault', 'popitem'):
247 _ensuresysstr(i + 4)
248
172 # Looks like "if __name__ == '__main__'".
249 # Looks like "if __name__ == '__main__'".
173 if (t.type == token.NAME and t.string == '__name__'
250 if (
174 and _isop(i + 1, '==')):
251 t.type == token.NAME
252 and t.string == '__name__'
253 and _isop(i + 1, '==')
254 ):
175 _ensuresysstr(i + 2)
255 _ensuresysstr(i + 2)
176
256
177 # Emit unmodified token.
257 # Emit unmodified token.
178 yield adjusttokenpos(t, coloffset)
258 yield adjusttokenpos(t, coloffset)
179
259
260
180 def process(fin, fout, opts):
261 def process(fin, fout, opts):
181 tokens = tokenize.tokenize(fin.readline)
262 tokens = tokenize.tokenize(fin.readline)
182 tokens = replacetokens(list(tokens), opts)
263 tokens = replacetokens(list(tokens), opts)
183 fout.write(tokenize.untokenize(tokens))
264 fout.write(tokenize.untokenize(tokens))
184
265
266
185 def tryunlink(fname):
267 def tryunlink(fname):
186 try:
268 try:
187 os.unlink(fname)
269 os.unlink(fname)
188 except OSError as err:
270 except OSError as err:
189 if err.errno != errno.ENOENT:
271 if err.errno != errno.ENOENT:
190 raise
272 raise
191
273
274
192 @contextlib.contextmanager
275 @contextlib.contextmanager
193 def editinplace(fname):
276 def editinplace(fname):
194 n = os.path.basename(fname)
277 n = os.path.basename(fname)
195 d = os.path.dirname(fname)
278 d = os.path.dirname(fname)
196 fp = tempfile.NamedTemporaryFile(prefix='.%s-' % n, suffix='~', dir=d,
279 fp = tempfile.NamedTemporaryFile(
197 delete=False)
280 prefix='.%s-' % n, suffix='~', dir=d, delete=False
281 )
198 try:
282 try:
199 yield fp
283 yield fp
200 fp.close()
284 fp.close()
201 if os.name == 'nt':
285 if os.name == 'nt':
202 tryunlink(fname)
286 tryunlink(fname)
203 os.rename(fp.name, fname)
287 os.rename(fp.name, fname)
204 finally:
288 finally:
205 fp.close()
289 fp.close()
206 tryunlink(fp.name)
290 tryunlink(fp.name)
207
291
292
208 def main():
293 def main():
209 ap = argparse.ArgumentParser()
294 ap = argparse.ArgumentParser()
210 ap.add_argument('-i', '--inplace', action='store_true', default=False,
295 ap.add_argument(
211 help='edit files in place')
296 '--version', action='version', version='Byteify strings 1.0'
212 ap.add_argument('--dictiter', action='store_true', default=False,
297 )
213 help='rewrite iteritems() and itervalues()'),
298 ap.add_argument(
299 '-i',
300 '--inplace',
301 action='store_true',
302 default=False,
303 help='edit files in place',
304 )
305 ap.add_argument(
306 '--dictiter',
307 action='store_true',
308 default=False,
309 help='rewrite iteritems() and itervalues()',
310 ),
311 ap.add_argument(
312 '--allow-attr-methods',
313 action='store_true',
314 default=False,
315 help='also handle attr*() when they are methods',
316 ),
317 ap.add_argument(
318 '--treat-as-kwargs',
319 nargs="+",
320 default=[],
321 help="ignore kwargs-like objects",
322 ),
214 ap.add_argument('files', metavar='FILE', nargs='+', help='source file')
323 ap.add_argument('files', metavar='FILE', nargs='+', help='source file')
215 args = ap.parse_args()
324 args = ap.parse_args()
216 opts = {
325 opts = {
217 'dictiter': args.dictiter,
326 'dictiter': args.dictiter,
327 'treat-as-kwargs': set(args.treat_as_kwargs),
328 'allow-attr-methods': args.allow_attr_methods,
218 }
329 }
219 for fname in args.files:
330 for fname in args.files:
220 if args.inplace:
331 if args.inplace:
221 with editinplace(fname) as fout:
332 with editinplace(fname) as fout:
222 with open(fname, 'rb') as fin:
333 with open(fname, 'rb') as fin:
223 process(fin, fout, opts)
334 process(fin, fout, opts)
224 else:
335 else:
225 with open(fname, 'rb') as fin:
336 with open(fname, 'rb') as fin:
226 fout = sys.stdout.buffer
337 fout = sys.stdout.buffer
227 process(fin, fout, opts)
338 process(fin, fout, opts)
228
339
340
229 if __name__ == '__main__':
341 if __name__ == '__main__':
230 if sys.version_info.major < 3:
342 if sys.version_info.major < 3:
231 print('This script must be run under Python 3.')
343 print('This script must be run under Python 3.')
232 sys.exit(3)
344 sys.exit(3)
233 main()
345 main()
@@ -1,38 +1,41 b''
1 from __future__ import absolute_import
1 from __future__ import absolute_import
2 import __builtin__
2 import __builtin__
3 import os
3 import os
4 from mercurial import (
4 from mercurial import util
5 util,
5
6 )
7
6
8 def lowerwrap(scope, funcname):
7 def lowerwrap(scope, funcname):
9 f = getattr(scope, funcname)
8 f = getattr(scope, funcname)
9
10 def wrap(fname, *args, **kwargs):
10 def wrap(fname, *args, **kwargs):
11 d, base = os.path.split(fname)
11 d, base = os.path.split(fname)
12 try:
12 try:
13 files = os.listdir(d or '.')
13 files = os.listdir(d or '.')
14 except OSError:
14 except OSError:
15 files = []
15 files = []
16 if base in files:
16 if base in files:
17 return f(fname, *args, **kwargs)
17 return f(fname, *args, **kwargs)
18 for fn in files:
18 for fn in files:
19 if fn.lower() == base.lower():
19 if fn.lower() == base.lower():
20 return f(os.path.join(d, fn), *args, **kwargs)
20 return f(os.path.join(d, fn), *args, **kwargs)
21 return f(fname, *args, **kwargs)
21 return f(fname, *args, **kwargs)
22
22 scope.__dict__[funcname] = wrap
23 scope.__dict__[funcname] = wrap
23
24
25
24 def normcase(path):
26 def normcase(path):
25 return path.lower()
27 return path.lower()
26
28
29
27 os.path.normcase = normcase
30 os.path.normcase = normcase
28
31
29 for f in 'file open'.split():
32 for f in 'file open'.split():
30 lowerwrap(__builtin__, f)
33 lowerwrap(__builtin__, f)
31
34
32 for f in "chmod chown open lstat stat remove unlink".split():
35 for f in "chmod chown open lstat stat remove unlink".split():
33 lowerwrap(os, f)
36 lowerwrap(os, f)
34
37
35 for f in "exists lexists".split():
38 for f in "exists lexists".split():
36 lowerwrap(os.path, f)
39 lowerwrap(os.path, f)
37
40
38 lowerwrap(util, 'posixfile')
41 lowerwrap(util, 'posixfile')
@@ -1,104 +1,121 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """Tool read primitive events from a pipe to produce a catapult trace.
7 """Tool read primitive events from a pipe to produce a catapult trace.
8
8
9 Usage:
9 Usage:
10 Terminal 1: $ catapipe.py /tmp/mypipe /tmp/trace.json
10 Terminal 1: $ catapipe.py /tmp/mypipe /tmp/trace.json
11 Terminal 2: $ HGCATAPULTSERVERPIPE=/tmp/mypipe hg root
11 Terminal 2: $ HGCATAPULTSERVERPIPE=/tmp/mypipe hg root
12 <ctrl-c catapipe.py in Terminal 1>
12 <ctrl-c catapipe.py in Terminal 1>
13 $ catapult/tracing/bin/trace2html /tmp/trace.json # produce /tmp/trace.html
13 $ catapult/tracing/bin/trace2html /tmp/trace.json # produce /tmp/trace.html
14 <open trace.html in your browser of choice; the WASD keys are very useful>
14 <open trace.html in your browser of choice; the WASD keys are very useful>
15 (catapult is located at https://github.com/catapult-project/catapult)
15 (catapult is located at https://github.com/catapult-project/catapult)
16
16
17 For now the event stream supports
17 For now the event stream supports
18
18
19 START $SESSIONID ...
19 START $SESSIONID ...
20
20
21 and
21 and
22
22
23 END $SESSIONID ...
23 END $SESSIONID ...
24
24
25 events. Everything after the SESSIONID (which must not contain spaces)
25 events. Everything after the SESSIONID (which must not contain spaces)
26 is used as a label for the event. Events are timestamped as of when
26 is used as a label for the event. Events are timestamped as of when
27 they arrive in this process and are then used to produce catapult
27 they arrive in this process and are then used to produce catapult
28 traces that can be loaded in Chrome's about:tracing utility. It's
28 traces that can be loaded in Chrome's about:tracing utility. It's
29 important that the event stream *into* this process stay simple,
29 important that the event stream *into* this process stay simple,
30 because we have to emit it from the shell scripts produced by
30 because we have to emit it from the shell scripts produced by
31 run-tests.py.
31 run-tests.py.
32
32
33 Typically you'll want to place the path to the named pipe in the
33 Typically you'll want to place the path to the named pipe in the
34 HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
34 HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
35 understand. To trace *only* run-tests, use HGTESTCATAPULTSERVERPIPE instead.
35 understand. To trace *only* run-tests, use HGTESTCATAPULTSERVERPIPE instead.
36 """
36 """
37 from __future__ import absolute_import, print_function
37 from __future__ import absolute_import, print_function
38
38
39 import argparse
39 import argparse
40 import json
40 import json
41 import os
41 import os
42 import timeit
42 import timeit
43
43
44 _TYPEMAP = {
44 _TYPEMAP = {
45 'START': 'B',
45 'START': 'B',
46 'END': 'E',
46 'END': 'E',
47 'COUNTER': 'C',
47 'COUNTER': 'C',
48 }
48 }
49
49
50 _threadmap = {}
50 _threadmap = {}
51
51
52 # Timeit already contains the whole logic about which timer to use based on
52 # Timeit already contains the whole logic about which timer to use based on
53 # Python version and OS
53 # Python version and OS
54 timer = timeit.default_timer
54 timer = timeit.default_timer
55
55
56
56 def main():
57 def main():
57 parser = argparse.ArgumentParser()
58 parser = argparse.ArgumentParser()
58 parser.add_argument('pipe', type=str, nargs=1,
59 parser.add_argument(
59 help='Path of named pipe to create and listen on.')
60 'pipe',
60 parser.add_argument('output', default='trace.json', type=str, nargs='?',
61 type=str,
61 help='Path of json file to create where the traces '
62 nargs=1,
62 'will be stored.')
63 help='Path of named pipe to create and listen on.',
63 parser.add_argument('--debug', default=False, action='store_true',
64 )
64 help='Print useful debug messages')
65 parser.add_argument(
66 'output',
67 default='trace.json',
68 type=str,
69 nargs='?',
70 help='Path of json file to create where the traces ' 'will be stored.',
71 )
72 parser.add_argument(
73 '--debug',
74 default=False,
75 action='store_true',
76 help='Print useful debug messages',
77 )
65 args = parser.parse_args()
78 args = parser.parse_args()
66 fn = args.pipe[0]
79 fn = args.pipe[0]
67 os.mkfifo(fn)
80 os.mkfifo(fn)
68 try:
81 try:
69 with open(fn) as f, open(args.output, 'w') as out:
82 with open(fn) as f, open(args.output, 'w') as out:
70 out.write('[\n')
83 out.write('[\n')
71 start = timer()
84 start = timer()
72 while True:
85 while True:
73 ev = f.readline().strip()
86 ev = f.readline().strip()
74 if not ev:
87 if not ev:
75 continue
88 continue
76 now = timer()
89 now = timer()
77 if args.debug:
90 if args.debug:
78 print(ev)
91 print(ev)
79 verb, session, label = ev.split(' ', 2)
92 verb, session, label = ev.split(' ', 2)
80 if session not in _threadmap:
93 if session not in _threadmap:
81 _threadmap[session] = len(_threadmap)
94 _threadmap[session] = len(_threadmap)
82 if verb == 'COUNTER':
95 if verb == 'COUNTER':
83 amount, label = label.split(' ', 1)
96 amount, label = label.split(' ', 1)
84 payload_args = {'value': int(amount)}
97 payload_args = {'value': int(amount)}
85 else:
98 else:
86 payload_args = {}
99 payload_args = {}
87 pid = _threadmap[session]
100 pid = _threadmap[session]
88 ts_micros = (now - start) * 1000000
101 ts_micros = (now - start) * 1000000
89 out.write(json.dumps(
102 out.write(
90 {
103 json.dumps(
91 "name": label,
104 {
92 "cat": "misc",
105 "name": label,
93 "ph": _TYPEMAP[verb],
106 "cat": "misc",
94 "ts": ts_micros,
107 "ph": _TYPEMAP[verb],
95 "pid": pid,
108 "ts": ts_micros,
96 "tid": 1,
109 "pid": pid,
97 "args": payload_args,
110 "tid": 1,
98 }))
111 "args": payload_args,
112 }
113 )
114 )
99 out.write(',\n')
115 out.write(',\n')
100 finally:
116 finally:
101 os.unlink(fn)
117 os.unlink(fn)
102
118
119
103 if __name__ == '__main__':
120 if __name__ == '__main__':
104 main()
121 main()
This diff has been collapsed as it changes many lines, (1001 lines changed) Show them Hide them
@@ -1,876 +1,1105 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-code - a style and portability checker for Mercurial
3 # check-code - a style and portability checker for Mercurial
4 #
4 #
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """style and portability checker for Mercurial
10 """style and portability checker for Mercurial
11
11
12 when a rule triggers wrong, do one of the following (prefer one from top):
12 when a rule triggers wrong, do one of the following (prefer one from top):
13 * do the work-around the rule suggests
13 * do the work-around the rule suggests
14 * doublecheck that it is a false match
14 * doublecheck that it is a false match
15 * improve the rule pattern
15 * improve the rule pattern
16 * add an ignore pattern to the rule (3rd arg) which matches your good line
16 * add an ignore pattern to the rule (3rd arg) which matches your good line
17 (you can append a short comment and match this, like: #re-raises)
17 (you can append a short comment and match this, like: #re-raises)
18 * change the pattern to a warning and list the exception in test-check-code-hg
18 * change the pattern to a warning and list the exception in test-check-code-hg
19 * ONLY use no--check-code for skipping entire files from external sources
19 * ONLY use no--check-code for skipping entire files from external sources
20 """
20 """
21
21
22 from __future__ import absolute_import, print_function
22 from __future__ import absolute_import, print_function
23 import glob
23 import glob
24 import keyword
24 import keyword
25 import optparse
25 import optparse
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29
29 if sys.version_info[0] < 3:
30 if sys.version_info[0] < 3:
30 opentext = open
31 opentext = open
31 else:
32 else:
33
32 def opentext(f):
34 def opentext(f):
33 return open(f, encoding='latin1')
35 return open(f, encoding='latin1')
36
37
34 try:
38 try:
35 xrange
39 xrange
36 except NameError:
40 except NameError:
37 xrange = range
41 xrange = range
38 try:
42 try:
39 import re2
43 import re2
40 except ImportError:
44 except ImportError:
41 re2 = None
45 re2 = None
42
46
43 import testparseutil
47 import testparseutil
44
48
49
45 def compilere(pat, multiline=False):
50 def compilere(pat, multiline=False):
46 if multiline:
51 if multiline:
47 pat = '(?m)' + pat
52 pat = '(?m)' + pat
48 if re2:
53 if re2:
49 try:
54 try:
50 return re2.compile(pat)
55 return re2.compile(pat)
51 except re2.error:
56 except re2.error:
52 pass
57 pass
53 return re.compile(pat)
58 return re.compile(pat)
54
59
60
55 # check "rules depending on implementation of repquote()" in each
61 # check "rules depending on implementation of repquote()" in each
56 # patterns (especially pypats), before changing around repquote()
62 # patterns (especially pypats), before changing around repquote()
57 _repquotefixedmap = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q',
63 _repquotefixedmap = {
58 '%': '%', '\\': 'b', '*': 'A', '+': 'P', '-': 'M'}
64 ' ': ' ',
65 '\n': '\n',
66 '.': 'p',
67 ':': 'q',
68 '%': '%',
69 '\\': 'b',
70 '*': 'A',
71 '+': 'P',
72 '-': 'M',
73 }
74
75
59 def _repquoteencodechr(i):
76 def _repquoteencodechr(i):
60 if i > 255:
77 if i > 255:
61 return 'u'
78 return 'u'
62 c = chr(i)
79 c = chr(i)
63 if c in _repquotefixedmap:
80 if c in _repquotefixedmap:
64 return _repquotefixedmap[c]
81 return _repquotefixedmap[c]
65 if c.isalpha():
82 if c.isalpha():
66 return 'x'
83 return 'x'
67 if c.isdigit():
84 if c.isdigit():
68 return 'n'
85 return 'n'
69 return 'o'
86 return 'o'
87
88
70 _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256))
89 _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256))
71
90
91
72 def repquote(m):
92 def repquote(m):
73 t = m.group('text')
93 t = m.group('text')
74 t = t.translate(_repquotett)
94 t = t.translate(_repquotett)
75 return m.group('quote') + t + m.group('quote')
95 return m.group('quote') + t + m.group('quote')
76
96
97
77 def reppython(m):
98 def reppython(m):
78 comment = m.group('comment')
99 comment = m.group('comment')
79 if comment:
100 if comment:
80 l = len(comment.rstrip())
101 l = len(comment.rstrip())
81 return "#" * l + comment[l:]
102 return "#" * l + comment[l:]
82 return repquote(m)
103 return repquote(m)
83
104
105
84 def repcomment(m):
106 def repcomment(m):
85 return m.group(1) + "#" * len(m.group(2))
107 return m.group(1) + "#" * len(m.group(2))
86
108
109
87 def repccomment(m):
110 def repccomment(m):
88 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
111 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
89 return m.group(1) + t + "*/"
112 return m.group(1) + t + "*/"
90
113
114
91 def repcallspaces(m):
115 def repcallspaces(m):
92 t = re.sub(r"\n\s+", "\n", m.group(2))
116 t = re.sub(r"\n\s+", "\n", m.group(2))
93 return m.group(1) + t
117 return m.group(1) + t
94
118
119
95 def repinclude(m):
120 def repinclude(m):
96 return m.group(1) + "<foo>"
121 return m.group(1) + "<foo>"
97
122
123
98 def rephere(m):
124 def rephere(m):
99 t = re.sub(r"\S", "x", m.group(2))
125 t = re.sub(r"\S", "x", m.group(2))
100 return m.group(1) + t
126 return m.group(1) + t
101
127
102
128
103 testpats = [
129 testpats = [
104 [
130 [
105 (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"),
131 (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"),
106 (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
132 (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
107 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
133 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
108 (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"),
134 (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"),
109 (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
135 (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
110 (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
136 (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
111 (r'echo -n', "don't use 'echo -n', use printf"),
137 (r'echo -n', "don't use 'echo -n', use printf"),
112 (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
138 (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
113 (r'head -c', "don't use 'head -c', use 'dd'"),
139 (r'head -c', "don't use 'head -c', use 'dd'"),
114 (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
140 (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
115 (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
141 (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
116 (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"),
142 (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"),
117 (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
143 (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
118 (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
144 (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
119 (r'\$\(.*\)', "don't use $(expr), use `expr`"),
145 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
120 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
146 (
121 (r'\[[^\]]+==', '[ foo == bar ] is a bashism, use [ foo = bar ] instead'),
147 r'\[[^\]]+==',
122 (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
148 '[ foo == bar ] is a bashism, use [ foo = bar ] instead',
123 "use egrep for extended grep syntax"),
149 ),
124 (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
150 (
125 (r'(?<!!)/bin/', "don't use explicit paths for tools"),
151 r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
126 (r'#!.*/bash', "don't use bash in shebang, use sh"),
152 "use egrep for extended grep syntax",
127 (r'[^\n]\Z', "no trailing newline"),
153 ),
128 (r'export .*=', "don't export and assign at once"),
154 (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
129 (r'^source\b', "don't use 'source', use '.'"),
155 (r'(?<!!)/bin/', "don't use explicit paths for tools"),
130 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
156 (r'#!.*/bash', "don't use bash in shebang, use sh"),
131 (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
157 (r'[^\n]\Z', "no trailing newline"),
132 (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
158 (r'export .*=', "don't export and assign at once"),
133 (r'^stop\(\)', "don't use 'stop' as a shell function name"),
159 (r'^source\b', "don't use 'source', use '.'"),
134 (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
160 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
135 (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"),
161 (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
136 (r'^alias\b.*=', "don't use alias, use a function"),
162 (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
137 (r'if\s*!', "don't use '!' to negate exit status"),
163 (r'^stop\(\)', "don't use 'stop' as a shell function name"),
138 (r'/dev/u?random', "don't use entropy, use /dev/zero"),
164 (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
139 (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
165 (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"),
140 (r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
166 (r'^alias\b.*=', "don't use alias, use a function"),
141 "put a backslash-escaped newline after sed 'i' command"),
167 (r'if\s*!', "don't use '!' to negate exit status"),
142 (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
168 (r'/dev/u?random', "don't use entropy, use /dev/zero"),
143 (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
169 (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
144 (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
170 (
145 (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
171 r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
146 (r'\butil\.Abort\b', "directly use error.Abort"),
172 "put a backslash-escaped newline after sed 'i' command",
147 (r'\|&', "don't use |&, use 2>&1"),
173 ),
148 (r'\w = +\w', "only one space after = allowed"),
174 (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
149 (r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
175 (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
150 (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
176 (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
151 (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
177 (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
152 (r'grep.* -[ABC]', "don't use grep's context flags"),
178 (r'\butil\.Abort\b', "directly use error.Abort"),
153 (r'find.*-printf',
179 (r'\|&', "don't use |&, use 2>&1"),
154 "don't use 'find -printf', it doesn't exist on BSD find(1)"),
180 (r'\w = +\w', "only one space after = allowed"),
155 (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"),
181 (
156 ],
182 r'\bsed\b.*[^\\]\\n',
157 # warnings
183 "don't use 'sed ... \\n', use a \\ and a newline",
158 [
184 ),
159 (r'^function', "don't use 'function', use old style"),
185 (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
160 (r'^diff.*-\w*N', "don't use 'diff -N'"),
186 (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
161 (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"),
187 (r'grep.* -[ABC]', "don't use grep's context flags"),
162 (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
188 (
163 (r'kill (`|\$\()', "don't use kill, use killdaemons.py")
189 r'find.*-printf',
164 ]
190 "don't use 'find -printf', it doesn't exist on BSD find(1)",
191 ),
192 (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"),
193 ],
194 # warnings
195 [
196 (r'^function', "don't use 'function', use old style"),
197 (r'^diff.*-\w*N', "don't use 'diff -N'"),
198 (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"),
199 (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
200 (r'kill (`|\$\()', "don't use kill, use killdaemons.py"),
201 ],
165 ]
202 ]
166
203
167 testfilters = [
204 testfilters = [
168 (r"( *)(#([^!][^\n]*\S)?)", repcomment),
205 (r"( *)(#([^!][^\n]*\S)?)", repcomment),
169 (r"<<(\S+)((.|\n)*?\n\1)", rephere),
206 (r"<<(\S+)((.|\n)*?\n\1)", rephere),
170 ]
207 ]
171
208
172 uprefix = r"^ \$ "
209 uprefix = r"^ \$ "
173 utestpats = [
210 utestpats = [
174 [
211 [
175 (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"),
212 (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"),
176 (uprefix + r'.*\|\s*sed[^|>\n]*\n',
213 (
177 "use regex test output patterns instead of sed"),
214 uprefix + r'.*\|\s*sed[^|>\n]*\n',
178 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
215 "use regex test output patterns instead of sed",
179 (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
216 ),
180 (uprefix + r'.*\|\| echo.*(fail|error)',
217 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
181 "explicit exit code checks unnecessary"),
218 (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
182 (uprefix + r'set -e', "don't use set -e"),
219 (
183 (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
220 uprefix + r'.*\|\| echo.*(fail|error)',
184 (uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
221 "explicit exit code checks unnecessary",
185 "as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
222 ),
186 '# no-msys'), # in test-pull.t which is skipped on windows
223 (uprefix + r'set -e', "don't use set -e"),
187 (r'^ [^$>].*27\.0\.0\.1',
224 (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
188 'use $LOCALIP not an explicit loopback address'),
225 (
189 (r'^ (?![>$] ).*\$LOCALIP.*[^)]$',
226 uprefix + r'.*:\.\S*/',
190 'mark $LOCALIP output lines with (glob) to help tests in BSD jails'),
227 "x:.y in a path does not work on msys, rewrite "
191 (r'^ (cat|find): .*: \$ENOENT\$',
228 "as x://.y, or see `hg log -k msys` for alternatives",
192 'use test -f to test for file existence'),
229 r'-\S+:\.|' '# no-msys', # -Rxxx
193 (r'^ diff -[^ -]*p',
230 ), # in test-pull.t which is skipped on windows
194 "don't use (external) diff with -p for portability"),
231 (
195 (r' readlink ', 'use readlink.py instead of readlink'),
232 r'^ [^$>].*27\.0\.0\.1',
196 (r'^ [-+][-+][-+] .* [-+]0000 \(glob\)',
233 'use $LOCALIP not an explicit loopback address',
197 "glob timezone field in diff output for portability"),
234 ),
198 (r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@',
235 (
199 "use '@@ -N* +N,n @@ (glob)' style chunk header for portability"),
236 r'^ (?![>$] ).*\$LOCALIP.*[^)]$',
200 (r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@',
237 'mark $LOCALIP output lines with (glob) to help tests in BSD jails',
201 "use '@@ -N,n +N* @@ (glob)' style chunk header for portability"),
238 ),
202 (r'^ @@ -[0-9]+ [+][0-9]+ @@',
239 (
203 "use '@@ -N* +N* @@ (glob)' style chunk header for portability"),
240 r'^ (cat|find): .*: \$ENOENT\$',
204 (uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff'
241 'use test -f to test for file existence',
205 r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$',
242 ),
206 "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)"),
243 (
207 ],
244 r'^ diff -[^ -]*p',
208 # warnings
245 "don't use (external) diff with -p for portability",
209 [
246 ),
210 (r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$',
247 (r' readlink ', 'use readlink.py instead of readlink'),
211 "glob match with no glob string (?, *, /, and $LOCALIP)"),
248 (
212 ]
249 r'^ [-+][-+][-+] .* [-+]0000 \(glob\)',
250 "glob timezone field in diff output for portability",
251 ),
252 (
253 r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@',
254 "use '@@ -N* +N,n @@ (glob)' style chunk header for portability",
255 ),
256 (
257 r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@',
258 "use '@@ -N,n +N* @@ (glob)' style chunk header for portability",
259 ),
260 (
261 r'^ @@ -[0-9]+ [+][0-9]+ @@',
262 "use '@@ -N* +N* @@ (glob)' style chunk header for portability",
263 ),
264 (
265 uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff'
266 r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$',
267 "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)",
268 ),
269 ],
270 # warnings
271 [
272 (
273 r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$',
274 "glob match with no glob string (?, *, /, and $LOCALIP)",
275 ),
276 ],
213 ]
277 ]
214
278
215 # transform plain test rules to unified test's
279 # transform plain test rules to unified test's
216 for i in [0, 1]:
280 for i in [0, 1]:
217 for tp in testpats[i]:
281 for tp in testpats[i]:
218 p = tp[0]
282 p = tp[0]
219 m = tp[1]
283 m = tp[1]
220 if p.startswith(r'^'):
284 if p.startswith(r'^'):
221 p = r"^ [$>] (%s)" % p[1:]
285 p = r"^ [$>] (%s)" % p[1:]
222 else:
286 else:
223 p = r"^ [$>] .*(%s)" % p
287 p = r"^ [$>] .*(%s)" % p
224 utestpats[i].append((p, m) + tp[2:])
288 utestpats[i].append((p, m) + tp[2:])
225
289
226 # don't transform the following rules:
290 # don't transform the following rules:
227 # " > \t" and " \t" should be allowed in unified tests
291 # " > \t" and " \t" should be allowed in unified tests
228 testpats[0].append((r'^( *)\t', "don't use tabs to indent"))
292 testpats[0].append((r'^( *)\t', "don't use tabs to indent"))
229 utestpats[0].append((r'^( ?)\t', "don't use tabs to indent"))
293 utestpats[0].append((r'^( ?)\t', "don't use tabs to indent"))
230
294
231 utestfilters = [
295 utestfilters = [
232 (r"<<(\S+)((.|\n)*?\n > \1)", rephere),
296 (r"<<(\S+)((.|\n)*?\n > \1)", rephere),
233 (r"( +)(#([^!][^\n]*\S)?)", repcomment),
297 (r"( +)(#([^!][^\n]*\S)?)", repcomment),
234 ]
298 ]
235
299
236 # common patterns to check *.py
300 # common patterns to check *.py
237 commonpypats = [
301 commonpypats = [
238 [
302 [
239 (r'\\$', 'Use () to wrap long lines in Python, not \\'),
303 (r'\\$', 'Use () to wrap long lines in Python, not \\'),
240 (r'^\s*def\s*\w+\s*\(.*,\s*\(',
304 (
241 "tuple parameter unpacking not available in Python 3+"),
305 r'^\s*def\s*\w+\s*\(.*,\s*\(',
242 (r'lambda\s*\(.*,.*\)',
306 "tuple parameter unpacking not available in Python 3+",
243 "tuple parameter unpacking not available in Python 3+"),
307 ),
244 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
308 (
245 (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"),
309 r'lambda\s*\(.*,.*\)',
246 (r'\bdict\(.*=', 'dict() is different in Py2 and 3 and is slower than {}',
310 "tuple parameter unpacking not available in Python 3+",
247 'dict-from-generator'),
311 ),
248 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
312 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
249 (r'\s<>\s', '<> operator is not available in Python 3+, use !='),
313 (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"),
250 (r'^\s*\t', "don't use tabs"),
314 (
251 (r'\S;\s*\n', "semicolon"),
315 r'\bdict\(.*=',
252 (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"),
316 'dict() is different in Py2 and 3 and is slower than {}',
253 (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"),
317 'dict-from-generator',
254 (r'(\w|\)),\w', "missing whitespace after ,"),
318 ),
255 (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
319 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
256 (r'^\s+(\w|\.)+=\w[^,()\n]*$', "missing whitespace in assignment"),
320 (r'\s<>\s', '<> operator is not available in Python 3+, use !='),
257 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
321 (r'^\s*\t', "don't use tabs"),
258 ((
322 (r'\S;\s*\n', "semicolon"),
259 # a line ending with a colon, potentially with trailing comments
323 (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"),
260 r':([ \t]*#[^\n]*)?\n'
324 (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"),
261 # one that is not a pass and not only a comment
325 (r'(\w|\)),\w', "missing whitespace after ,"),
262 r'(?P<indent>[ \t]+)[^#][^\n]+\n'
326 (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
263 # more lines at the same indent level
327 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
264 r'((?P=indent)[^\n]+\n)*'
328 (
265 # a pass at the same indent level, which is bogus
329 (
266 r'(?P=indent)pass[ \t\n#]'
330 # a line ending with a colon, potentially with trailing comments
267 ), 'omit superfluous pass'),
331 r':([ \t]*#[^\n]*)?\n'
268 (r'[^\n]\Z', "no trailing newline"),
332 # one that is not a pass and not only a comment
269 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
333 r'(?P<indent>[ \t]+)[^#][^\n]+\n'
270 # (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
334 # more lines at the same indent level
271 # "don't use underbars in identifiers"),
335 r'((?P=indent)[^\n]+\n)*'
272 (r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
336 # a pass at the same indent level, which is bogus
273 "don't use camelcase in identifiers", r'#.*camelcase-required'),
337 r'(?P=indent)pass[ \t\n#]'
274 (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
338 ),
275 "linebreak after :"),
339 'omit superfluous pass',
276 (r'class\s[^( \n]+:', "old-style class, use class foo(object)",
340 ),
277 r'#.*old-style'),
341 (r'[^\n]\Z', "no trailing newline"),
278 (r'class\s[^( \n]+\(\):',
342 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
279 "class foo() creates old style object, use class foo(object)",
343 (
280 r'#.*old-style'),
344 r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
281 (r'\b(%s)\(' % '|'.join(k for k in keyword.kwlist
345 "don't use camelcase in identifiers",
282 if k not in ('print', 'exec')),
346 r'#.*camelcase-required',
283 "Python keyword is not a function"),
347 ),
284 (r',]', "unneeded trailing ',' in list"),
348 (
285 # (r'class\s[A-Z][^\(]*\((?!Exception)',
349 r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
286 # "don't capitalize non-exception classes"),
350 "linebreak after :",
287 # (r'in range\(', "use xrange"),
351 ),
288 # (r'^\s*print\s+', "avoid using print in core and extensions"),
352 (
289 (r'[\x80-\xff]', "non-ASCII character literal"),
353 r'class\s[^( \n]+:',
290 (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"),
354 "old-style class, use class foo(object)",
291 (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
355 r'#.*old-style',
292 "gratuitous whitespace after Python keyword"),
356 ),
293 (r'([\(\[][ \t]\S)|(\S[ \t][\)\]])', "gratuitous whitespace in () or []"),
357 (
294 # (r'\s\s=', "gratuitous whitespace before ="),
358 r'class\s[^( \n]+\(\):',
295 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
359 "class foo() creates old style object, use class foo(object)",
296 "missing whitespace around operator"),
360 r'#.*old-style',
297 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
361 ),
298 "missing whitespace around operator"),
362 (
299 (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
363 r'\b(%s)\('
300 "missing whitespace around operator"),
364 % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')),
301 (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]',
365 "Python keyword is not a function",
302 "wrong whitespace around ="),
366 ),
303 (r'\([^()]*( =[^=]|[^<>!=]= )',
367 # (r'class\s[A-Z][^\(]*\((?!Exception)',
304 "no whitespace around = for named parameters"),
368 # "don't capitalize non-exception classes"),
305 (r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
369 # (r'in range\(', "use xrange"),
306 "don't use old-style two-argument raise, use Exception(message)"),
370 # (r'^\s*print\s+', "avoid using print in core and extensions"),
307 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
371 (r'[\x80-\xff]', "non-ASCII character literal"),
308 (r' [=!]=\s+(True|False|None)',
372 (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"),
309 "comparison with singleton, use 'is' or 'is not' instead"),
373 (
310 (r'^\s*(while|if) [01]:',
374 r'([\(\[][ \t]\S)|(\S[ \t][\)\]])',
311 "use True/False for constant Boolean expression"),
375 "gratuitous whitespace in () or []",
312 (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
376 ),
313 (r'(?:(?<!def)\s+|\()hasattr\(',
377 # (r'\s\s=', "gratuitous whitespace before ="),
314 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
378 (
315 'instead', r'#.*hasattr-py3-only'),
379 r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
316 (r'opener\([^)]*\).read\(',
380 "missing whitespace around operator",
317 "use opener.read() instead"),
381 ),
318 (r'opener\([^)]*\).write\(',
382 (
319 "use opener.write() instead"),
383 r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
320 (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
384 "missing whitespace around operator",
321 (r'\.debug\(\_', "don't mark debug messages for translation"),
385 ),
322 (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
386 (
323 (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
387 r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
324 (r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
388 "missing whitespace around operator",
325 'legacy exception syntax; use "as" instead of ","'),
389 ),
326 (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
390 (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="),
327 (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
391 (
328 (r'os\.path\.join\(.*, *(""|\'\')\)',
392 r'\([^()]*( =[^=]|[^<>!=]= )',
329 "use pathutil.normasprefix(path) instead of os.path.join(path, '')"),
393 "no whitespace around = for named parameters",
330 (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
394 ),
331 # XXX only catch mutable arguments on the first line of the definition
395 (
332 (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
396 r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
333 (r'\butil\.Abort\b', "directly use error.Abort"),
397 "don't use old-style two-argument raise, use Exception(message)",
334 (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"),
398 ),
335 (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + "
399 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
336 "pycompat.queue.Empty"),
400 (
337 (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
401 r' [=!]=\s+(True|False|None)',
338 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
402 "comparison with singleton, use 'is' or 'is not' instead",
339 (r'^import SocketServer', "don't use SockerServer, use util.socketserver"),
403 ),
340 (r'^import urlparse', "don't use urlparse, use util.urlreq"),
404 (
341 (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
405 r'^\s*(while|if) [01]:',
342 (r'^import cPickle', "don't use cPickle, use util.pickle"),
406 "use True/False for constant Boolean expression",
343 (r'^import pickle', "don't use pickle, use util.pickle"),
407 ),
344 (r'^import httplib', "don't use httplib, use util.httplib"),
408 (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
345 (r'^import BaseHTTPServer', "use util.httpserver instead"),
409 (
346 (r'^(from|import) mercurial\.(cext|pure|cffi)',
410 r'(?:(?<!def)\s+|\()hasattr\(',
347 "use mercurial.policy.importmod instead"),
411 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
348 (r'\.next\(\)', "don't use .next(), use next(...)"),
412 'instead',
349 (r'([a-z]*).revision\(\1\.node\(',
413 r'#.*hasattr-py3-only',
350 "don't convert rev to node before passing to revision(nodeorrev)"),
414 ),
351 (r'platform\.system\(\)', "don't use platform.system(), use pycompat"),
415 (r'opener\([^)]*\).read\(', "use opener.read() instead"),
352
416 (r'opener\([^)]*\).write\(', "use opener.write() instead"),
353 ],
417 (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
354 # warnings
418 (r'\.debug\(\_', "don't mark debug messages for translation"),
355 [
419 (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
356 ]
420 (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
421 (
422 r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
423 'legacy exception syntax; use "as" instead of ","',
424 ),
425 (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
426 (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
427 (
428 r'os\.path\.join\(.*, *(""|\'\')\)',
429 "use pathutil.normasprefix(path) instead of os.path.join(path, '')",
430 ),
431 (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
432 # XXX only catch mutable arguments on the first line of the definition
433 (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
434 (r'\butil\.Abort\b', "directly use error.Abort"),
435 (
436 r'^@(\w*\.)?cachefunc',
437 "module-level @cachefunc is risky, please avoid",
438 ),
439 (
440 r'^import Queue',
441 "don't use Queue, use pycompat.queue.Queue + "
442 "pycompat.queue.Empty",
443 ),
444 (
445 r'^import cStringIO',
446 "don't use cStringIO.StringIO, use util.stringio",
447 ),
448 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
449 (
450 r'^import SocketServer',
451 "don't use SockerServer, use util.socketserver",
452 ),
453 (r'^import urlparse', "don't use urlparse, use util.urlreq"),
454 (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
455 (r'^import cPickle', "don't use cPickle, use util.pickle"),
456 (r'^import pickle', "don't use pickle, use util.pickle"),
457 (r'^import httplib', "don't use httplib, use util.httplib"),
458 (r'^import BaseHTTPServer', "use util.httpserver instead"),
459 (
460 r'^(from|import) mercurial\.(cext|pure|cffi)',
461 "use mercurial.policy.importmod instead",
462 ),
463 (r'\.next\(\)', "don't use .next(), use next(...)"),
464 (
465 r'([a-z]*).revision\(\1\.node\(',
466 "don't convert rev to node before passing to revision(nodeorrev)",
467 ),
468 (r'platform\.system\(\)', "don't use platform.system(), use pycompat"),
469 ],
470 # warnings
471 [],
357 ]
472 ]
358
473
359 # patterns to check normal *.py files
474 # patterns to check normal *.py files
360 pypats = [
475 pypats = [
361 [
476 [
362 # Ideally, these should be placed in "commonpypats" for
477 # Ideally, these should be placed in "commonpypats" for
363 # consistency of coding rules in Mercurial source tree.
478 # consistency of coding rules in Mercurial source tree.
364 # But on the other hand, these are not so seriously required for
479 # But on the other hand, these are not so seriously required for
365 # python code fragments embedded in test scripts. Fixing test
480 # python code fragments embedded in test scripts. Fixing test
366 # scripts for these patterns requires many changes, and has less
481 # scripts for these patterns requires many changes, and has less
367 # profit than effort.
482 # profit than effort.
368 (r'.{81}', "line too long"),
483 (r'raise Exception', "don't raise generic exceptions"),
369 (r'raise Exception', "don't raise generic exceptions"),
484 (r'[\s\(](open|file)\([^)]*\)\.read\(', "use util.readfile() instead"),
370 (r'[\s\(](open|file)\([^)]*\)\.read\(',
485 (
371 "use util.readfile() instead"),
486 r'[\s\(](open|file)\([^)]*\)\.write\(',
372 (r'[\s\(](open|file)\([^)]*\)\.write\(',
487 "use util.writefile() instead",
373 "use util.writefile() instead"),
488 ),
374 (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))',
489 (
375 "always assign an opened file to a variable, and close it afterwards"),
490 r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))',
376 (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))',
491 "always assign an opened file to a variable, and close it afterwards",
377 "always assign an opened file to a variable, and close it afterwards"),
492 ),
378 (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"),
493 (
379 (r'^import atexit', "don't use atexit, use ui.atexit"),
494 r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))',
380
495 "always assign an opened file to a variable, and close it afterwards",
381 # rules depending on implementation of repquote()
496 ),
382 (r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
497 (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"),
383 'string join across lines with no space'),
498 (r'^import atexit', "don't use atexit, use ui.atexit"),
384 (r'''(?x)ui\.(status|progress|write|note|warn)\(
499 # rules depending on implementation of repquote()
500 (
501 r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
502 'string join across lines with no space',
503 ),
504 (
505 r'''(?x)ui\.(status|progress|write|note|warn)\(
385 [ \t\n#]*
506 [ \t\n#]*
386 (?# any strings/comments might precede a string, which
507 (?# any strings/comments might precede a string, which
387 # contains translatable message)
508 # contains translatable message)
388 ((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)*
509 b?((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)*
389 (?# sequence consisting of below might precede translatable message
510 (?# sequence consisting of below might precede translatable message
390 # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ...
511 # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ...
391 # - escaped character: "\\", "\n", "\0" ...
512 # - escaped character: "\\", "\n", "\0" ...
392 # - character other than '%', 'b' as '\', and 'x' as alphabet)
513 # - character other than '%', 'b' as '\', and 'x' as alphabet)
393 (['"]|\'\'\'|""")
514 (['"]|\'\'\'|""")
394 ((%([ n]?[PM]?([np]+|A))?x)|%%|b[bnx]|[ \nnpqAPMo])*x
515 ((%([ n]?[PM]?([np]+|A))?x)|%%|b[bnx]|[ \nnpqAPMo])*x
395 (?# this regexp can't use [^...] style,
516 (?# this regexp can't use [^...] style,
396 # because _preparepats forcibly adds "\n" into [^...],
517 # because _preparepats forcibly adds "\n" into [^...],
397 # even though this regexp wants match it against "\n")''',
518 # even though this regexp wants match it against "\n")''',
398 "missing _() in ui message (use () to hide false-positives)"),
519 "missing _() in ui message (use () to hide false-positives)",
399 ] + commonpypats[0],
520 ),
400 # warnings
521 ]
401 [
522 + commonpypats[0],
402 # rules depending on implementation of repquote()
523 # warnings
403 (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
524 [
404 ] + commonpypats[1]
525 # rules depending on implementation of repquote()
526 (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
527 ]
528 + commonpypats[1],
405 ]
529 ]
406
530
407 # patterns to check *.py for embedded ones in test script
531 # patterns to check *.py for embedded ones in test script
408 embeddedpypats = [
532 embeddedpypats = [
409 [
533 [] + commonpypats[0],
410 ] + commonpypats[0],
534 # warnings
411 # warnings
535 [] + commonpypats[1],
412 [
413 ] + commonpypats[1]
414 ]
536 ]
415
537
416 # common filters to convert *.py
538 # common filters to convert *.py
417 commonpyfilters = [
539 commonpyfilters = [
418 (r"""(?msx)(?P<comment>\#.*?$)|
540 (
541 r"""(?msx)(?P<comment>\#.*?$)|
419 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
542 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
420 (?P<text>(([^\\]|\\.)*?))
543 (?P<text>(([^\\]|\\.)*?))
421 (?P=quote))""", reppython),
544 (?P=quote))""",
545 reppython,
546 ),
422 ]
547 ]
423
548
424 # filters to convert normal *.py files
549 # filters to convert normal *.py files
425 pyfilters = [
550 pyfilters = [] + commonpyfilters
426 ] + commonpyfilters
427
551
428 # non-filter patterns
552 # non-filter patterns
429 pynfpats = [
553 pynfpats = [
430 [
554 [
431 (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"),
555 (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"),
432 (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"),
556 (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"),
433 (r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]',
557 (
434 "use pycompat.isdarwin"),
558 r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]',
559 "use pycompat.isdarwin",
560 ),
435 ],
561 ],
436 # warnings
562 # warnings
437 [],
563 [],
438 ]
564 ]
439
565
440 # filters to convert *.py for embedded ones in test script
566 # filters to convert *.py for embedded ones in test script
441 embeddedpyfilters = [
567 embeddedpyfilters = [] + commonpyfilters
442 ] + commonpyfilters
443
568
444 # extension non-filter patterns
569 # extension non-filter patterns
445 pyextnfpats = [
570 pyextnfpats = [
446 [(r'^"""\n?[A-Z]', "don't capitalize docstring title")],
571 [(r'^"""\n?[A-Z]', "don't capitalize docstring title")],
447 # warnings
572 # warnings
448 [],
573 [],
449 ]
574 ]
450
575
451 txtfilters = []
576 txtfilters = []
452
577
453 txtpats = [
578 txtpats = [
454 [
579 [
455 (r'\s$', 'trailing whitespace'),
580 (r'\s$', 'trailing whitespace'),
456 ('.. note::[ \n][^\n]', 'add two newlines after note::')
581 ('.. note::[ \n][^\n]', 'add two newlines after note::'),
457 ],
582 ],
458 []
583 [],
459 ]
584 ]
460
585
461 cpats = [
586 cpats = [
462 [
587 [
463 (r'//', "don't use //-style comments"),
588 (r'//', "don't use //-style comments"),
464 (r'\S\t', "don't use tabs except for indent"),
589 (r'\S\t', "don't use tabs except for indent"),
465 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
590 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
466 (r'.{81}', "line too long"),
591 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
467 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
592 (r'return\(', "return is not a function"),
468 (r'return\(', "return is not a function"),
593 (r' ;', "no space before ;"),
469 (r' ;', "no space before ;"),
594 (r'[^;] \)', "no space before )"),
470 (r'[^;] \)', "no space before )"),
595 (r'[)][{]', "space between ) and {"),
471 (r'[)][{]', "space between ) and {"),
596 (r'\w+\* \w+', "use int *foo, not int* foo"),
472 (r'\w+\* \w+', "use int *foo, not int* foo"),
597 (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
473 (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
598 (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
474 (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
599 (r'\w,\w', "missing whitespace after ,"),
475 (r'\w,\w', "missing whitespace after ,"),
600 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
476 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
601 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
477 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
602 (r'^#\s+\w', "use #foo, not # foo"),
478 (r'^#\s+\w', "use #foo, not # foo"),
603 (r'[^\n]\Z', "no trailing newline"),
479 (r'[^\n]\Z', "no trailing newline"),
604 (r'^\s*#import\b', "use only #include in standard C code"),
480 (r'^\s*#import\b', "use only #include in standard C code"),
605 (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
481 (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
606 (r'strcat\(', "don't use strcat"),
482 (r'strcat\(', "don't use strcat"),
607 # rules depending on implementation of repquote()
483
608 ],
484 # rules depending on implementation of repquote()
609 # warnings
485 ],
610 [
486 # warnings
611 # rules depending on implementation of repquote()
487 [
612 ],
488 # rules depending on implementation of repquote()
489 ]
490 ]
613 ]
491
614
492 cfilters = [
615 cfilters = [
493 (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
616 (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
494 (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
617 (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
495 (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
618 (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
496 (r'(\()([^)]+\))', repcallspaces),
619 (r'(\()([^)]+\))', repcallspaces),
497 ]
620 ]
498
621
499 inutilpats = [
622 inutilpats = [
500 [
623 [(r'\bui\.', "don't use ui in util"),],
501 (r'\bui\.', "don't use ui in util"),
624 # warnings
502 ],
625 [],
503 # warnings
504 []
505 ]
626 ]
506
627
507 inrevlogpats = [
628 inrevlogpats = [
508 [
629 [(r'\brepo\.', "don't use repo in revlog"),],
509 (r'\brepo\.', "don't use repo in revlog"),
630 # warnings
510 ],
631 [],
511 # warnings
512 []
513 ]
632 ]
514
633
515 webtemplatefilters = []
634 webtemplatefilters = []
516
635
517 webtemplatepats = [
636 webtemplatepats = [
518 [],
637 [],
519 [
638 [
520 (r'{desc(\|(?!websub|firstline)[^\|]*)+}',
639 (
521 'follow desc keyword with either firstline or websub'),
640 r'{desc(\|(?!websub|firstline)[^\|]*)+}',
522 ]
641 'follow desc keyword with either firstline or websub',
642 ),
643 ],
523 ]
644 ]
524
645
525 allfilesfilters = []
646 allfilesfilters = []
526
647
527 allfilespats = [
648 allfilespats = [
528 [
649 [
529 (r'(http|https)://[a-zA-Z0-9./]*selenic.com/',
650 (
530 'use mercurial-scm.org domain URL'),
651 r'(http|https)://[a-zA-Z0-9./]*selenic.com/',
531 (r'mercurial@selenic\.com',
652 'use mercurial-scm.org domain URL',
532 'use mercurial-scm.org domain for mercurial ML address'),
653 ),
533 (r'mercurial-devel@selenic\.com',
654 (
534 'use mercurial-scm.org domain for mercurial-devel ML address'),
655 r'mercurial@selenic\.com',
535 ],
656 'use mercurial-scm.org domain for mercurial ML address',
536 # warnings
657 ),
537 [],
658 (
659 r'mercurial-devel@selenic\.com',
660 'use mercurial-scm.org domain for mercurial-devel ML address',
661 ),
662 ],
663 # warnings
664 [],
538 ]
665 ]
539
666
540 py3pats = [
667 py3pats = [
541 [
668 [
542 (r'os\.environ', "use encoding.environ instead (py3)", r'#.*re-exports'),
669 (
543 (r'os\.name', "use pycompat.osname instead (py3)"),
670 r'os\.environ',
544 (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'),
671 "use encoding.environ instead (py3)",
545 (r'os\.sep', "use pycompat.ossep instead (py3)"),
672 r'#.*re-exports',
546 (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"),
673 ),
547 (r'os\.altsep', "use pycompat.osaltsep instead (py3)"),
674 (r'os\.name', "use pycompat.osname instead (py3)"),
548 (r'sys\.platform', "use pycompat.sysplatform instead (py3)"),
675 (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'),
549 (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"),
676 (r'os\.sep', "use pycompat.ossep instead (py3)"),
550 (r'os\.getenv', "use encoding.environ.get instead"),
677 (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"),
551 (r'os\.setenv', "modifying the environ dict is not preferred"),
678 (r'os\.altsep', "use pycompat.osaltsep instead (py3)"),
552 (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"),
679 (r'sys\.platform', "use pycompat.sysplatform instead (py3)"),
553 ],
680 (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"),
554 # warnings
681 (r'os\.getenv', "use encoding.environ.get instead"),
555 [],
682 (r'os\.setenv', "modifying the environ dict is not preferred"),
683 (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"),
684 ],
685 # warnings
686 [],
556 ]
687 ]
557
688
558 checks = [
689 checks = [
559 ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
690 ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
560 ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats),
691 ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats),
561 ('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
692 ('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
562 ('python 3', r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
693 (
563 '', pyfilters, py3pats),
694 'python 3',
695 r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
696 '',
697 pyfilters,
698 py3pats,
699 ),
564 ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
700 ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
565 ('c', r'.*\.[ch]$', '', cfilters, cpats),
701 ('c', r'.*\.[ch]$', '', cfilters, cpats),
566 ('unified test', r'.*\.t$', '', utestfilters, utestpats),
702 ('unified test', r'.*\.t$', '', utestfilters, utestpats),
567 ('layering violation repo in revlog', r'mercurial/revlog\.py', '',
703 (
568 pyfilters, inrevlogpats),
704 'layering violation repo in revlog',
569 ('layering violation ui in util', r'mercurial/util\.py', '', pyfilters,
705 r'mercurial/revlog\.py',
570 inutilpats),
706 '',
707 pyfilters,
708 inrevlogpats,
709 ),
710 (
711 'layering violation ui in util',
712 r'mercurial/util\.py',
713 '',
714 pyfilters,
715 inutilpats,
716 ),
571 ('txt', r'.*\.txt$', '', txtfilters, txtpats),
717 ('txt', r'.*\.txt$', '', txtfilters, txtpats),
572 ('web template', r'mercurial/templates/.*\.tmpl', '',
718 (
573 webtemplatefilters, webtemplatepats),
719 'web template',
574 ('all except for .po', r'.*(?<!\.po)$', '',
720 r'mercurial/templates/.*\.tmpl',
575 allfilesfilters, allfilespats),
721 '',
722 webtemplatefilters,
723 webtemplatepats,
724 ),
725 ('all except for .po', r'.*(?<!\.po)$', '', allfilesfilters, allfilespats),
576 ]
726 ]
577
727
578 # (desc,
728 # (desc,
579 # func to pick up embedded code fragments,
729 # func to pick up embedded code fragments,
580 # list of patterns to convert target files
730 # list of patterns to convert target files
581 # list of patterns to detect errors/warnings)
731 # list of patterns to detect errors/warnings)
582 embeddedchecks = [
732 embeddedchecks = [
583 ('embedded python',
733 (
584 testparseutil.pyembedded, embeddedpyfilters, embeddedpypats)
734 'embedded python',
735 testparseutil.pyembedded,
736 embeddedpyfilters,
737 embeddedpypats,
738 )
585 ]
739 ]
586
740
741
587 def _preparepats():
742 def _preparepats():
588 def preparefailandwarn(failandwarn):
743 def preparefailandwarn(failandwarn):
589 for pats in failandwarn:
744 for pats in failandwarn:
590 for i, pseq in enumerate(pats):
745 for i, pseq in enumerate(pats):
591 # fix-up regexes for multi-line searches
746 # fix-up regexes for multi-line searches
592 p = pseq[0]
747 p = pseq[0]
593 # \s doesn't match \n (done in two steps)
748 # \s doesn't match \n (done in two steps)
594 # first, we replace \s that appears in a set already
749 # first, we replace \s that appears in a set already
595 p = re.sub(r'\[\\s', r'[ \\t', p)
750 p = re.sub(r'\[\\s', r'[ \\t', p)
596 # now we replace other \s instances.
751 # now we replace other \s instances.
597 p = re.sub(r'(?<!(\\|\[))\\s', r'[ \\t]', p)
752 p = re.sub(r'(?<!(\\|\[))\\s', r'[ \\t]', p)
598 # [^...] doesn't match newline
753 # [^...] doesn't match newline
599 p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)
754 p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)
600
755
601 pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:]
756 pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:]
602
757
603 def preparefilters(filters):
758 def preparefilters(filters):
604 for i, flt in enumerate(filters):
759 for i, flt in enumerate(filters):
605 filters[i] = re.compile(flt[0]), flt[1]
760 filters[i] = re.compile(flt[0]), flt[1]
606
761
607 for cs in (checks, embeddedchecks):
762 for cs in (checks, embeddedchecks):
608 for c in cs:
763 for c in cs:
609 failandwarn = c[-1]
764 failandwarn = c[-1]
610 preparefailandwarn(failandwarn)
765 preparefailandwarn(failandwarn)
611
766
612 filters = c[-2]
767 filters = c[-2]
613 preparefilters(filters)
768 preparefilters(filters)
614
769
770
615 class norepeatlogger(object):
771 class norepeatlogger(object):
616 def __init__(self):
772 def __init__(self):
617 self._lastseen = None
773 self._lastseen = None
618
774
619 def log(self, fname, lineno, line, msg, blame):
775 def log(self, fname, lineno, line, msg, blame):
620 """print error related a to given line of a given file.
776 """print error related a to given line of a given file.
621
777
622 The faulty line will also be printed but only once in the case
778 The faulty line will also be printed but only once in the case
623 of multiple errors.
779 of multiple errors.
624
780
625 :fname: filename
781 :fname: filename
626 :lineno: line number
782 :lineno: line number
627 :line: actual content of the line
783 :line: actual content of the line
628 :msg: error message
784 :msg: error message
629 """
785 """
630 msgid = fname, lineno, line
786 msgid = fname, lineno, line
631 if msgid != self._lastseen:
787 if msgid != self._lastseen:
632 if blame:
788 if blame:
633 print("%s:%d (%s):" % (fname, lineno, blame))
789 print("%s:%d (%s):" % (fname, lineno, blame))
634 else:
790 else:
635 print("%s:%d:" % (fname, lineno))
791 print("%s:%d:" % (fname, lineno))
636 print(" > %s" % line)
792 print(" > %s" % line)
637 self._lastseen = msgid
793 self._lastseen = msgid
638 print(" " + msg)
794 print(" " + msg)
639
795
796
640 _defaultlogger = norepeatlogger()
797 _defaultlogger = norepeatlogger()
641
798
799
642 def getblame(f):
800 def getblame(f):
643 lines = []
801 lines = []
644 for l in os.popen('hg annotate -un %s' % f):
802 for l in os.popen('hg annotate -un %s' % f):
645 start, line = l.split(':', 1)
803 start, line = l.split(':', 1)
646 user, rev = start.split()
804 user, rev = start.split()
647 lines.append((line[1:-1], user, rev))
805 lines.append((line[1:-1], user, rev))
648 return lines
806 return lines
649
807
650 def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
808
651 blame=False, debug=False, lineno=True):
809 def checkfile(
810 f,
811 logfunc=_defaultlogger.log,
812 maxerr=None,
813 warnings=False,
814 blame=False,
815 debug=False,
816 lineno=True,
817 ):
652 """checks style and portability of a given file
818 """checks style and portability of a given file
653
819
654 :f: filepath
820 :f: filepath
655 :logfunc: function used to report error
821 :logfunc: function used to report error
656 logfunc(filename, linenumber, linecontent, errormessage)
822 logfunc(filename, linenumber, linecontent, errormessage)
657 :maxerr: number of error to display before aborting.
823 :maxerr: number of error to display before aborting.
658 Set to false (default) to report all errors
824 Set to false (default) to report all errors
659
825
660 return True if no error is found, False otherwise.
826 return True if no error is found, False otherwise.
661 """
827 """
662 result = True
828 result = True
663
829
664 try:
830 try:
665 with opentext(f) as fp:
831 with opentext(f) as fp:
666 try:
832 try:
667 pre = fp.read()
833 pre = fp.read()
668 except UnicodeDecodeError as e:
834 except UnicodeDecodeError as e:
669 print("%s while reading %s" % (e, f))
835 print("%s while reading %s" % (e, f))
670 return result
836 return result
671 except IOError as e:
837 except IOError as e:
672 print("Skipping %s, %s" % (f, str(e).split(':', 1)[0]))
838 print("Skipping %s, %s" % (f, str(e).split(':', 1)[0]))
673 return result
839 return result
674
840
675 # context information shared while single checkfile() invocation
841 # context information shared while single checkfile() invocation
676 context = {'blamecache': None}
842 context = {'blamecache': None}
677
843
678 for name, match, magic, filters, pats in checks:
844 for name, match, magic, filters, pats in checks:
679 if debug:
845 if debug:
680 print(name, f)
846 print(name, f)
681 if not (re.match(match, f) or (magic and re.search(magic, pre))):
847 if not (re.match(match, f) or (magic and re.search(magic, pre))):
682 if debug:
848 if debug:
683 print("Skipping %s for %s it doesn't match %s" % (
849 print(
684 name, match, f))
850 "Skipping %s for %s it doesn't match %s" % (name, match, f)
851 )
685 continue
852 continue
686 if "no-" "check-code" in pre:
853 if "no-" "check-code" in pre:
687 # If you're looking at this line, it's because a file has:
854 # If you're looking at this line, it's because a file has:
688 # no- check- code
855 # no- check- code
689 # but the reason to output skipping is to make life for
856 # but the reason to output skipping is to make life for
690 # tests easier. So, instead of writing it with a normal
857 # tests easier. So, instead of writing it with a normal
691 # spelling, we write it with the expected spelling from
858 # spelling, we write it with the expected spelling from
692 # tests/test-check-code.t
859 # tests/test-check-code.t
693 print("Skipping %s it has no-che?k-code (glob)" % f)
860 print("Skipping %s it has no-che?k-code (glob)" % f)
694 return "Skip" # skip checking this file
861 return "Skip" # skip checking this file
695
862
696 fc = _checkfiledata(name, f, pre, filters, pats, context,
863 fc = _checkfiledata(
697 logfunc, maxerr, warnings, blame, debug, lineno)
864 name,
865 f,
866 pre,
867 filters,
868 pats,
869 context,
870 logfunc,
871 maxerr,
872 warnings,
873 blame,
874 debug,
875 lineno,
876 )
698 if fc:
877 if fc:
699 result = False
878 result = False
700
879
701 if f.endswith('.t') and "no-" "check-code" not in pre:
880 if f.endswith('.t') and "no-" "check-code" not in pre:
702 if debug:
881 if debug:
703 print("Checking embedded code in %s" % (f))
882 print("Checking embedded code in %s" % f)
704
883
705 prelines = pre.splitlines()
884 prelines = pre.splitlines()
706 embeddederros = []
885 embeddederros = []
707 for name, embedded, filters, pats in embeddedchecks:
886 for name, embedded, filters, pats in embeddedchecks:
708 # "reset curmax at each repetition" treats maxerr as "max
887 # "reset curmax at each repetition" treats maxerr as "max
709 # nubmer of errors in an actual file per entry of
888 # nubmer of errors in an actual file per entry of
710 # (embedded)checks"
889 # (embedded)checks"
711 curmaxerr = maxerr
890 curmaxerr = maxerr
712
891
713 for found in embedded(f, prelines, embeddederros):
892 for found in embedded(f, prelines, embeddederros):
714 filename, starts, ends, code = found
893 filename, starts, ends, code = found
715 fc = _checkfiledata(name, f, code, filters, pats, context,
894 fc = _checkfiledata(
716 logfunc, curmaxerr, warnings, blame, debug,
895 name,
717 lineno, offset=starts - 1)
896 f,
897 code,
898 filters,
899 pats,
900 context,
901 logfunc,
902 curmaxerr,
903 warnings,
904 blame,
905 debug,
906 lineno,
907 offset=starts - 1,
908 )
718 if fc:
909 if fc:
719 result = False
910 result = False
720 if curmaxerr:
911 if curmaxerr:
721 if fc >= curmaxerr:
912 if fc >= curmaxerr:
722 break
913 break
723 curmaxerr -= fc
914 curmaxerr -= fc
724
915
725 return result
916 return result
726
917
727 def _checkfiledata(name, f, filedata, filters, pats, context,
918
728 logfunc, maxerr, warnings, blame, debug, lineno,
919 def _checkfiledata(
729 offset=None):
920 name,
921 f,
922 filedata,
923 filters,
924 pats,
925 context,
926 logfunc,
927 maxerr,
928 warnings,
929 blame,
930 debug,
931 lineno,
932 offset=None,
933 ):
730 """Execute actual error check for file data
934 """Execute actual error check for file data
731
935
732 :name: of the checking category
936 :name: of the checking category
733 :f: filepath
937 :f: filepath
734 :filedata: content of a file
938 :filedata: content of a file
735 :filters: to be applied before checking
939 :filters: to be applied before checking
736 :pats: to detect errors
940 :pats: to detect errors
737 :context: a dict of information shared while single checkfile() invocation
941 :context: a dict of information shared while single checkfile() invocation
738 Valid keys: 'blamecache'.
942 Valid keys: 'blamecache'.
739 :logfunc: function used to report error
943 :logfunc: function used to report error
740 logfunc(filename, linenumber, linecontent, errormessage)
944 logfunc(filename, linenumber, linecontent, errormessage)
741 :maxerr: number of error to display before aborting, or False to
945 :maxerr: number of error to display before aborting, or False to
742 report all errors
946 report all errors
743 :warnings: whether warning level checks should be applied
947 :warnings: whether warning level checks should be applied
744 :blame: whether blame information should be displayed at error reporting
948 :blame: whether blame information should be displayed at error reporting
745 :debug: whether debug information should be displayed
949 :debug: whether debug information should be displayed
746 :lineno: whether lineno should be displayed at error reporting
950 :lineno: whether lineno should be displayed at error reporting
747 :offset: line number offset of 'filedata' in 'f' for checking
951 :offset: line number offset of 'filedata' in 'f' for checking
748 an embedded code fragment, or None (offset=0 is different
952 an embedded code fragment, or None (offset=0 is different
749 from offset=None)
953 from offset=None)
750
954
751 returns number of detected errors.
955 returns number of detected errors.
752 """
956 """
753 blamecache = context['blamecache']
957 blamecache = context['blamecache']
754 if offset is None:
958 if offset is None:
755 lineoffset = 0
959 lineoffset = 0
756 else:
960 else:
757 lineoffset = offset
961 lineoffset = offset
758
962
759 fc = 0
963 fc = 0
760 pre = post = filedata
964 pre = post = filedata
761
965
762 if True: # TODO: get rid of this redundant 'if' block
966 if True: # TODO: get rid of this redundant 'if' block
763 for p, r in filters:
967 for p, r in filters:
764 post = re.sub(p, r, post)
968 post = re.sub(p, r, post)
765 nerrs = len(pats[0]) # nerr elements are errors
969 nerrs = len(pats[0]) # nerr elements are errors
766 if warnings:
970 if warnings:
767 pats = pats[0] + pats[1]
971 pats = pats[0] + pats[1]
768 else:
972 else:
769 pats = pats[0]
973 pats = pats[0]
770 # print post # uncomment to show filtered version
974 # print post # uncomment to show filtered version
771
975
772 if debug:
976 if debug:
773 print("Checking %s for %s" % (name, f))
977 print("Checking %s for %s" % (name, f))
774
978
775 prelines = None
979 prelines = None
776 errors = []
980 errors = []
777 for i, pat in enumerate(pats):
981 for i, pat in enumerate(pats):
778 if len(pat) == 3:
982 if len(pat) == 3:
779 p, msg, ignore = pat
983 p, msg, ignore = pat
780 else:
984 else:
781 p, msg = pat
985 p, msg = pat
782 ignore = None
986 ignore = None
783 if i >= nerrs:
987 if i >= nerrs:
784 msg = "warning: " + msg
988 msg = "warning: " + msg
785
989
786 pos = 0
990 pos = 0
787 n = 0
991 n = 0
788 for m in p.finditer(post):
992 for m in p.finditer(post):
789 if prelines is None:
993 if prelines is None:
790 prelines = pre.splitlines()
994 prelines = pre.splitlines()
791 postlines = post.splitlines(True)
995 postlines = post.splitlines(True)
792
996
793 start = m.start()
997 start = m.start()
794 while n < len(postlines):
998 while n < len(postlines):
795 step = len(postlines[n])
999 step = len(postlines[n])
796 if pos + step > start:
1000 if pos + step > start:
797 break
1001 break
798 pos += step
1002 pos += step
799 n += 1
1003 n += 1
800 l = prelines[n]
1004 l = prelines[n]
801
1005
802 if ignore and re.search(ignore, l, re.MULTILINE):
1006 if ignore and re.search(ignore, l, re.MULTILINE):
803 if debug:
1007 if debug:
804 print("Skipping %s for %s:%s (ignore pattern)" % (
1008 print(
805 name, f, (n + lineoffset)))
1009 "Skipping %s for %s:%s (ignore pattern)"
1010 % (name, f, (n + lineoffset))
1011 )
806 continue
1012 continue
807 bd = ""
1013 bd = ""
808 if blame:
1014 if blame:
809 bd = 'working directory'
1015 bd = 'working directory'
810 if blamecache is None:
1016 if blamecache is None:
811 blamecache = getblame(f)
1017 blamecache = getblame(f)
812 context['blamecache'] = blamecache
1018 context['blamecache'] = blamecache
813 if (n + lineoffset) < len(blamecache):
1019 if (n + lineoffset) < len(blamecache):
814 bl, bu, br = blamecache[(n + lineoffset)]
1020 bl, bu, br = blamecache[(n + lineoffset)]
815 if offset is None and bl == l:
1021 if offset is None and bl == l:
816 bd = '%s@%s' % (bu, br)
1022 bd = '%s@%s' % (bu, br)
817 elif offset is not None and bl.endswith(l):
1023 elif offset is not None and bl.endswith(l):
818 # "offset is not None" means "checking
1024 # "offset is not None" means "checking
819 # embedded code fragment". In this case,
1025 # embedded code fragment". In this case,
820 # "l" does not have information about the
1026 # "l" does not have information about the
821 # beginning of an *original* line in the
1027 # beginning of an *original* line in the
822 # file (e.g. ' > ').
1028 # file (e.g. ' > ').
823 # Therefore, use "str.endswith()", and
1029 # Therefore, use "str.endswith()", and
824 # show "maybe" for a little loose
1030 # show "maybe" for a little loose
825 # examination.
1031 # examination.
826 bd = '%s@%s, maybe' % (bu, br)
1032 bd = '%s@%s, maybe' % (bu, br)
827
1033
828 errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd))
1034 errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd))
829
1035
830 errors.sort()
1036 errors.sort()
831 for e in errors:
1037 for e in errors:
832 logfunc(*e)
1038 logfunc(*e)
833 fc += 1
1039 fc += 1
834 if maxerr and fc >= maxerr:
1040 if maxerr and fc >= maxerr:
835 print(" (too many errors, giving up)")
1041 print(" (too many errors, giving up)")
836 break
1042 break
837
1043
838 return fc
1044 return fc
839
1045
1046
840 def main():
1047 def main():
841 parser = optparse.OptionParser("%prog [options] [files | -]")
1048 parser = optparse.OptionParser("%prog [options] [files | -]")
842 parser.add_option("-w", "--warnings", action="store_true",
1049 parser.add_option(
843 help="include warning-level checks")
1050 "-w",
844 parser.add_option("-p", "--per-file", type="int",
1051 "--warnings",
845 help="max warnings per file")
1052 action="store_true",
846 parser.add_option("-b", "--blame", action="store_true",
1053 help="include warning-level checks",
847 help="use annotate to generate blame info")
1054 )
848 parser.add_option("", "--debug", action="store_true",
1055 parser.add_option(
849 help="show debug information")
1056 "-p", "--per-file", type="int", help="max warnings per file"
850 parser.add_option("", "--nolineno", action="store_false",
1057 )
851 dest='lineno', help="don't show line numbers")
1058 parser.add_option(
1059 "-b",
1060 "--blame",
1061 action="store_true",
1062 help="use annotate to generate blame info",
1063 )
1064 parser.add_option(
1065 "", "--debug", action="store_true", help="show debug information"
1066 )
1067 parser.add_option(
1068 "",
1069 "--nolineno",
1070 action="store_false",
1071 dest='lineno',
1072 help="don't show line numbers",
1073 )
852
1074
853 parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False,
1075 parser.set_defaults(
854 lineno=True)
1076 per_file=15, warnings=False, blame=False, debug=False, lineno=True
1077 )
855 (options, args) = parser.parse_args()
1078 (options, args) = parser.parse_args()
856
1079
857 if len(args) == 0:
1080 if len(args) == 0:
858 check = glob.glob("*")
1081 check = glob.glob("*")
859 elif args == ['-']:
1082 elif args == ['-']:
860 # read file list from stdin
1083 # read file list from stdin
861 check = sys.stdin.read().splitlines()
1084 check = sys.stdin.read().splitlines()
862 else:
1085 else:
863 check = args
1086 check = args
864
1087
865 _preparepats()
1088 _preparepats()
866
1089
867 ret = 0
1090 ret = 0
868 for f in check:
1091 for f in check:
869 if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
1092 if not checkfile(
870 blame=options.blame, debug=options.debug,
1093 f,
871 lineno=options.lineno):
1094 maxerr=options.per_file,
1095 warnings=options.warnings,
1096 blame=options.blame,
1097 debug=options.debug,
1098 lineno=options.lineno,
1099 ):
872 ret = 1
1100 ret = 1
873 return ret
1101 return ret
874
1102
1103
875 if __name__ == "__main__":
1104 if __name__ == "__main__":
876 sys.exit(main())
1105 sys.exit(main())
@@ -1,109 +1,103 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # Copyright 2014 Matt Mackall <mpm@selenic.com>
3 # Copyright 2014 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # A tool/hook to run basic sanity checks on commits/patches for
5 # A tool/hook to run basic sanity checks on commits/patches for
6 # submission to Mercurial. Install by adding the following to your
6 # submission to Mercurial. Install by adding the following to your
7 # .hg/hgrc:
7 # .hg/hgrc:
8 #
8 #
9 # [hooks]
9 # [hooks]
10 # pretxncommit = contrib/check-commit
10 # pretxncommit = contrib/check-commit
11 #
11 #
12 # The hook can be temporarily bypassed with:
12 # The hook can be temporarily bypassed with:
13 #
13 #
14 # $ BYPASS= hg commit
14 # $ BYPASS= hg commit
15 #
15 #
16 # See also: https://mercurial-scm.org/wiki/ContributingChanges
16 # See also: https://mercurial-scm.org/wiki/ContributingChanges
17
17
18 from __future__ import absolute_import, print_function
18 from __future__ import absolute_import, print_function
19
19
20 import os
20 import os
21 import re
21 import re
22 import sys
22 import sys
23
23
24 commitheader = r"^(?:# [^\n]*\n)*"
24 commitheader = r"^(?:# [^\n]*\n)*"
25 afterheader = commitheader + r"(?!#)"
25 afterheader = commitheader + r"(?!#)"
26 beforepatch = afterheader + r"(?!\n(?!@@))"
26 beforepatch = afterheader + r"(?!\n(?!@@))"
27
27
28 errors = [
28 errors = [
29 (beforepatch + r".*[(]bc[)]", "(BC) needs to be uppercase"),
29 (beforepatch + r".*[(]bc[)]", "(BC) needs to be uppercase"),
30 (beforepatch + r".*[(]issue \d\d\d",
30 (beforepatch + r".*[(]issue \d\d\d",
31 "no space allowed between issue and number"),
31 "no space allowed between issue and number"),
32 (beforepatch + r".*[(]bug(\d|\s)", "use (issueDDDD) instead of bug"),
32 (beforepatch + r".*[(]bug(\d|\s)", "use (issueDDDD) instead of bug"),
33 (commitheader + r"# User [^@\n]+\n", "username is not an email address"),
33 (commitheader + r"# User [^@\n]+\n", "username is not an email address"),
34 (commitheader + r"(?!merge with )[^#]\S+[^:] ",
34 (commitheader + r"(?!merge with )[^#]\S+[^:] ",
35 "summary line doesn't start with 'topic: '"),
35 "summary line doesn't start with 'topic: '"),
36 (afterheader + r"[A-Z][a-z]\S+", "don't capitalize summary lines"),
36 (afterheader + r"[A-Z][a-z]\S+", "don't capitalize summary lines"),
37 (afterheader + r"^\S+: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
37 (afterheader + r"^\S+: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
38 (afterheader + r"\S*[^A-Za-z0-9-_]\S*: ",
38 (afterheader + r"\S*[^A-Za-z0-9-_]\S*: ",
39 "summary keyword should be most user-relevant one-word command or topic"),
39 "summary keyword should be most user-relevant one-word command or topic"),
40 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
40 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
41 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
41 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
42 # Forbid "_" in function name.
43 #
44 # We skip the check for cffi related functions. They use names mapping the
45 # name of the C function. C function names may contain "_".
46 (r"\n\+[ \t]+def (?!cffi)[a-z]+_[a-z]",
47 "adds a function with foo_bar naming"),
48 ]
42 ]
49
43
50 word = re.compile(r'\S')
44 word = re.compile(r'\S')
51 def nonempty(first, second):
45 def nonempty(first, second):
52 if word.search(first):
46 if word.search(first):
53 return first
47 return first
54 return second
48 return second
55
49
56 def checkcommit(commit, node=None):
50 def checkcommit(commit, node=None):
57 exitcode = 0
51 exitcode = 0
58 printed = node is None
52 printed = node is None
59 hits = []
53 hits = []
60 signtag = (afterheader +
54 signtag = (afterheader +
61 r'Added (tag [^ ]+|signature) for changeset [a-f0-9]{12}')
55 r'Added (tag [^ ]+|signature) for changeset [a-f0-9]{12}')
62 if re.search(signtag, commit):
56 if re.search(signtag, commit):
63 return 0
57 return 0
64 for exp, msg in errors:
58 for exp, msg in errors:
65 for m in re.finditer(exp, commit):
59 for m in re.finditer(exp, commit):
66 end = m.end()
60 end = m.end()
67 trailing = re.search(r'(\\n)+$', exp)
61 trailing = re.search(r'(\\n)+$', exp)
68 if trailing:
62 if trailing:
69 end -= len(trailing.group()) / 2
63 end -= len(trailing.group()) / 2
70 hits.append((end, exp, msg))
64 hits.append((end, exp, msg))
71 if hits:
65 if hits:
72 hits.sort()
66 hits.sort()
73 pos = 0
67 pos = 0
74 last = ''
68 last = ''
75 for n, l in enumerate(commit.splitlines(True)):
69 for n, l in enumerate(commit.splitlines(True)):
76 pos += len(l)
70 pos += len(l)
77 while len(hits):
71 while len(hits):
78 end, exp, msg = hits[0]
72 end, exp, msg = hits[0]
79 if pos < end:
73 if pos < end:
80 break
74 break
81 if not printed:
75 if not printed:
82 printed = True
76 printed = True
83 print("node: %s" % node)
77 print("node: %s" % node)
84 print("%d: %s" % (n, msg))
78 print("%d: %s" % (n, msg))
85 print(" %s" % nonempty(l, last)[:-1])
79 print(" %s" % nonempty(l, last)[:-1])
86 if "BYPASS" not in os.environ:
80 if "BYPASS" not in os.environ:
87 exitcode = 1
81 exitcode = 1
88 del hits[0]
82 del hits[0]
89 last = nonempty(l, last)
83 last = nonempty(l, last)
90
84
91 return exitcode
85 return exitcode
92
86
93 def readcommit(node):
87 def readcommit(node):
94 return os.popen("hg export %s" % node).read()
88 return os.popen("hg export %s" % node).read()
95
89
96 if __name__ == "__main__":
90 if __name__ == "__main__":
97 exitcode = 0
91 exitcode = 0
98 node = os.environ.get("HG_NODE")
92 node = os.environ.get("HG_NODE")
99
93
100 if node:
94 if node:
101 commit = readcommit(node)
95 commit = readcommit(node)
102 exitcode = checkcommit(commit)
96 exitcode = checkcommit(commit)
103 elif sys.argv[1:]:
97 elif sys.argv[1:]:
104 for node in sys.argv[1:]:
98 for node in sys.argv[1:]:
105 exitcode |= checkcommit(readcommit(node), node)
99 exitcode |= checkcommit(readcommit(node), node)
106 else:
100 else:
107 commit = sys.stdin.read()
101 commit = sys.stdin.read()
108 exitcode = checkcommit(commit)
102 exitcode = checkcommit(commit)
109 sys.exit(exitcode)
103 sys.exit(exitcode)
@@ -1,159 +1,191 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-config - a config flag documentation checker for Mercurial
3 # check-config - a config flag documentation checker for Mercurial
4 #
4 #
5 # Copyright 2015 Matt Mackall <mpm@selenic.com>
5 # Copyright 2015 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
10 from __future__ import absolute_import, print_function
11 import re
11 import re
12 import sys
12 import sys
13
13
14 foundopts = {}
14 foundopts = {}
15 documented = {}
15 documented = {}
16 allowinconsistent = set()
16 allowinconsistent = set()
17
17
18 configre = re.compile(br'''
18 configre = re.compile(
19 br'''
19 # Function call
20 # Function call
20 ui\.config(?P<ctype>|int|bool|list)\(
21 ui\.config(?P<ctype>|int|bool|list)\(
21 # First argument.
22 # First argument.
22 ['"](?P<section>\S+)['"],\s*
23 ['"](?P<section>\S+)['"],\s*
23 # Second argument
24 # Second argument
24 ['"](?P<option>\S+)['"](,\s+
25 ['"](?P<option>\S+)['"](,\s+
25 (?:default=)?(?P<default>\S+?))?
26 (?:default=)?(?P<default>\S+?))?
26 \)''', re.VERBOSE | re.MULTILINE)
27 \)''',
28 re.VERBOSE | re.MULTILINE,
29 )
27
30
28 configwithre = re.compile(br'''
31 configwithre = re.compile(
32 br'''
29 ui\.config(?P<ctype>with)\(
33 ui\.config(?P<ctype>with)\(
30 # First argument is callback function. This doesn't parse robustly
34 # First argument is callback function. This doesn't parse robustly
31 # if it is e.g. a function call.
35 # if it is e.g. a function call.
32 [^,]+,\s*
36 [^,]+,\s*
33 ['"](?P<section>\S+)['"],\s*
37 ['"](?P<section>\S+)['"],\s*
34 ['"](?P<option>\S+)['"](,\s+
38 ['"](?P<option>\S+)['"](,\s+
35 (?:default=)?(?P<default>\S+?))?
39 (?:default=)?(?P<default>\S+?))?
36 \)''', re.VERBOSE | re.MULTILINE)
40 \)''',
41 re.VERBOSE | re.MULTILINE,
42 )
37
43
38 configpartialre = (br"""ui\.config""")
44 configpartialre = br"""ui\.config"""
39
45
40 ignorere = re.compile(br'''
46 ignorere = re.compile(
47 br'''
41 \#\s(?P<reason>internal|experimental|deprecated|developer|inconsistent)\s
48 \#\s(?P<reason>internal|experimental|deprecated|developer|inconsistent)\s
42 config:\s(?P<config>\S+\.\S+)$
49 config:\s(?P<config>\S+\.\S+)$
43 ''', re.VERBOSE | re.MULTILINE)
50 ''',
51 re.VERBOSE | re.MULTILINE,
52 )
44
53
45 if sys.version_info[0] > 2:
54 if sys.version_info[0] > 2:
55
46 def mkstr(b):
56 def mkstr(b):
47 if isinstance(b, str):
57 if isinstance(b, str):
48 return b
58 return b
49 return b.decode('utf8')
59 return b.decode('utf8')
60
61
50 else:
62 else:
51 mkstr = lambda x: x
63 mkstr = lambda x: x
52
64
65
53 def main(args):
66 def main(args):
54 for f in args:
67 for f in args:
55 sect = b''
68 sect = b''
56 prevname = b''
69 prevname = b''
57 confsect = b''
70 confsect = b''
58 carryover = b''
71 carryover = b''
59 linenum = 0
72 linenum = 0
60 for l in open(f, 'rb'):
73 for l in open(f, 'rb'):
61 linenum += 1
74 linenum += 1
62
75
63 # check topic-like bits
76 # check topic-like bits
64 m = re.match(br'\s*``(\S+)``', l)
77 m = re.match(br'\s*``(\S+)``', l)
65 if m:
78 if m:
66 prevname = m.group(1)
79 prevname = m.group(1)
67 if re.match(br'^\s*-+$', l):
80 if re.match(br'^\s*-+$', l):
68 sect = prevname
81 sect = prevname
69 prevname = b''
82 prevname = b''
70
83
71 if sect and prevname:
84 if sect and prevname:
72 name = sect + b'.' + prevname
85 name = sect + b'.' + prevname
73 documented[name] = 1
86 documented[name] = 1
74
87
75 # check docstring bits
88 # check docstring bits
76 m = re.match(br'^\s+\[(\S+)\]', l)
89 m = re.match(br'^\s+\[(\S+)\]', l)
77 if m:
90 if m:
78 confsect = m.group(1)
91 confsect = m.group(1)
79 continue
92 continue
80 m = re.match(br'^\s+(?:#\s*)?(\S+) = ', l)
93 m = re.match(br'^\s+(?:#\s*)?(\S+) = ', l)
81 if m:
94 if m:
82 name = confsect + b'.' + m.group(1)
95 name = confsect + b'.' + m.group(1)
83 documented[name] = 1
96 documented[name] = 1
84
97
85 # like the bugzilla extension
98 # like the bugzilla extension
86 m = re.match(br'^\s*(\S+\.\S+)$', l)
99 m = re.match(br'^\s*(\S+\.\S+)$', l)
87 if m:
100 if m:
88 documented[m.group(1)] = 1
101 documented[m.group(1)] = 1
89
102
90 # like convert
103 # like convert
91 m = re.match(br'^\s*:(\S+\.\S+):\s+', l)
104 m = re.match(br'^\s*:(\S+\.\S+):\s+', l)
92 if m:
105 if m:
93 documented[m.group(1)] = 1
106 documented[m.group(1)] = 1
94
107
95 # quoted in help or docstrings
108 # quoted in help or docstrings
96 m = re.match(br'.*?``(\S+\.\S+)``', l)
109 m = re.match(br'.*?``(\S+\.\S+)``', l)
97 if m:
110 if m:
98 documented[m.group(1)] = 1
111 documented[m.group(1)] = 1
99
112
100 # look for ignore markers
113 # look for ignore markers
101 m = ignorere.search(l)
114 m = ignorere.search(l)
102 if m:
115 if m:
103 if m.group('reason') == b'inconsistent':
116 if m.group('reason') == b'inconsistent':
104 allowinconsistent.add(m.group('config'))
117 allowinconsistent.add(m.group('config'))
105 else:
118 else:
106 documented[m.group('config')] = 1
119 documented[m.group('config')] = 1
107
120
108 # look for code-like bits
121 # look for code-like bits
109 line = carryover + l
122 line = carryover + l
110 m = configre.search(line) or configwithre.search(line)
123 m = configre.search(line) or configwithre.search(line)
111 if m:
124 if m:
112 ctype = m.group('ctype')
125 ctype = m.group('ctype')
113 if not ctype:
126 if not ctype:
114 ctype = 'str'
127 ctype = 'str'
115 name = m.group('section') + b"." + m.group('option')
128 name = m.group('section') + b"." + m.group('option')
116 default = m.group('default')
129 default = m.group('default')
117 if default in (
130 if default in (
118 None, b'False', b'None', b'0', b'[]', b'""', b"''"):
131 None,
132 b'False',
133 b'None',
134 b'0',
135 b'[]',
136 b'""',
137 b"''",
138 ):
119 default = b''
139 default = b''
120 if re.match(b'[a-z.]+$', default):
140 if re.match(b'[a-z.]+$', default):
121 default = b'<variable>'
141 default = b'<variable>'
122 if (name in foundopts and (ctype, default) != foundopts[name]
142 if (
123 and name not in allowinconsistent):
143 name in foundopts
144 and (ctype, default) != foundopts[name]
145 and name not in allowinconsistent
146 ):
124 print(mkstr(l.rstrip()))
147 print(mkstr(l.rstrip()))
125 fctype, fdefault = foundopts[name]
148 fctype, fdefault = foundopts[name]
126 print("conflict on %s: %r != %r" % (
149 print(
127 mkstr(name),
150 "conflict on %s: %r != %r"
128 (mkstr(ctype), mkstr(default)),
151 % (
129 (mkstr(fctype), mkstr(fdefault))))
152 mkstr(name),
153 (mkstr(ctype), mkstr(default)),
154 (mkstr(fctype), mkstr(fdefault)),
155 )
156 )
130 print("at %s:%d:" % (mkstr(f), linenum))
157 print("at %s:%d:" % (mkstr(f), linenum))
131 foundopts[name] = (ctype, default)
158 foundopts[name] = (ctype, default)
132 carryover = b''
159 carryover = b''
133 else:
160 else:
134 m = re.search(configpartialre, line)
161 m = re.search(configpartialre, line)
135 if m:
162 if m:
136 carryover = line
163 carryover = line
137 else:
164 else:
138 carryover = b''
165 carryover = b''
139
166
140 for name in sorted(foundopts):
167 for name in sorted(foundopts):
141 if name not in documented:
168 if name not in documented:
142 if not (name.startswith(b"devel.") or
169 if not (
143 name.startswith(b"experimental.") or
170 name.startswith(b"devel.")
144 name.startswith(b"debug.")):
171 or name.startswith(b"experimental.")
172 or name.startswith(b"debug.")
173 ):
145 ctype, default = foundopts[name]
174 ctype, default = foundopts[name]
146 if default:
175 if default:
147 if isinstance(default, bytes):
176 if isinstance(default, bytes):
148 default = mkstr(default)
177 default = mkstr(default)
149 default = ' [%s]' % default
178 default = ' [%s]' % default
150 elif isinstance(default, bytes):
179 elif isinstance(default, bytes):
151 default = mkstr(default)
180 default = mkstr(default)
152 print("undocumented: %s (%s)%s" % (
181 print(
153 mkstr(name), mkstr(ctype), default))
182 "undocumented: %s (%s)%s"
183 % (mkstr(name), mkstr(ctype), default)
184 )
185
154
186
155 if __name__ == "__main__":
187 if __name__ == "__main__":
156 if len(sys.argv) > 1:
188 if len(sys.argv) > 1:
157 sys.exit(main(sys.argv[1:]))
189 sys.exit(main(sys.argv[1:]))
158 else:
190 else:
159 sys.exit(main([l.rstrip() for l in sys.stdin]))
191 sys.exit(main([l.rstrip() for l in sys.stdin]))
@@ -1,102 +1,113 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-py3-compat - check Python 3 compatibility of Mercurial files
3 # check-py3-compat - check Python 3 compatibility of Mercurial files
4 #
4 #
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
10 from __future__ import absolute_import, print_function
11
11
12 import ast
12 import ast
13 import importlib
13 import importlib
14 import os
14 import os
15 import sys
15 import sys
16 import traceback
16 import traceback
17 import warnings
17 import warnings
18
18
19
19 def check_compat_py2(f):
20 def check_compat_py2(f):
20 """Check Python 3 compatibility for a file with Python 2"""
21 """Check Python 3 compatibility for a file with Python 2"""
21 with open(f, 'rb') as fh:
22 with open(f, 'rb') as fh:
22 content = fh.read()
23 content = fh.read()
23 root = ast.parse(content)
24 root = ast.parse(content)
24
25
25 # Ignore empty files.
26 # Ignore empty files.
26 if not root.body:
27 if not root.body:
27 return
28 return
28
29
29 futures = set()
30 futures = set()
30 haveprint = False
31 haveprint = False
31 for node in ast.walk(root):
32 for node in ast.walk(root):
32 if isinstance(node, ast.ImportFrom):
33 if isinstance(node, ast.ImportFrom):
33 if node.module == '__future__':
34 if node.module == '__future__':
34 futures |= set(n.name for n in node.names)
35 futures |= set(n.name for n in node.names)
35 elif isinstance(node, ast.Print):
36 elif isinstance(node, ast.Print):
36 haveprint = True
37 haveprint = True
37
38
38 if 'absolute_import' not in futures:
39 if 'absolute_import' not in futures:
39 print('%s not using absolute_import' % f)
40 print('%s not using absolute_import' % f)
40 if haveprint and 'print_function' not in futures:
41 if haveprint and 'print_function' not in futures:
41 print('%s requires print_function' % f)
42 print('%s requires print_function' % f)
42
43
44
43 def check_compat_py3(f):
45 def check_compat_py3(f):
44 """Check Python 3 compatibility of a file with Python 3."""
46 """Check Python 3 compatibility of a file with Python 3."""
45 with open(f, 'rb') as fh:
47 with open(f, 'rb') as fh:
46 content = fh.read()
48 content = fh.read()
47
49
48 try:
50 try:
49 ast.parse(content, filename=f)
51 ast.parse(content, filename=f)
50 except SyntaxError as e:
52 except SyntaxError as e:
51 print('%s: invalid syntax: %s' % (f, e))
53 print('%s: invalid syntax: %s' % (f, e))
52 return
54 return
53
55
54 # Try to import the module.
56 # Try to import the module.
55 # For now we only support modules in packages because figuring out module
57 # For now we only support modules in packages because figuring out module
56 # paths for things not in a package can be confusing.
58 # paths for things not in a package can be confusing.
57 if (f.startswith(('hgdemandimport/', 'hgext/', 'mercurial/'))
59 if f.startswith(
58 and not f.endswith('__init__.py')):
60 ('hgdemandimport/', 'hgext/', 'mercurial/')
61 ) and not f.endswith('__init__.py'):
59 assert f.endswith('.py')
62 assert f.endswith('.py')
60 name = f.replace('/', '.')[:-3]
63 name = f.replace('/', '.')[:-3]
61 try:
64 try:
62 importlib.import_module(name)
65 importlib.import_module(name)
63 except Exception as e:
66 except Exception as e:
64 exc_type, exc_value, tb = sys.exc_info()
67 exc_type, exc_value, tb = sys.exc_info()
65 # We walk the stack and ignore frames from our custom importer,
68 # We walk the stack and ignore frames from our custom importer,
66 # import mechanisms, and stdlib modules. This kinda/sorta
69 # import mechanisms, and stdlib modules. This kinda/sorta
67 # emulates CPython behavior in import.c while also attempting
70 # emulates CPython behavior in import.c while also attempting
68 # to pin blame on a Mercurial file.
71 # to pin blame on a Mercurial file.
69 for frame in reversed(traceback.extract_tb(tb)):
72 for frame in reversed(traceback.extract_tb(tb)):
70 if frame.name == '_call_with_frames_removed':
73 if frame.name == '_call_with_frames_removed':
71 continue
74 continue
72 if 'importlib' in frame.filename:
75 if 'importlib' in frame.filename:
73 continue
76 continue
74 if 'mercurial/__init__.py' in frame.filename:
77 if 'mercurial/__init__.py' in frame.filename:
75 continue
78 continue
76 if frame.filename.startswith(sys.prefix):
79 if frame.filename.startswith(sys.prefix):
77 continue
80 continue
78 break
81 break
79
82
80 if frame.filename:
83 if frame.filename:
81 filename = os.path.basename(frame.filename)
84 filename = os.path.basename(frame.filename)
82 print('%s: error importing: <%s> %s (error at %s:%d)' % (
85 print(
83 f, type(e).__name__, e, filename, frame.lineno))
86 '%s: error importing: <%s> %s (error at %s:%d)'
87 % (f, type(e).__name__, e, filename, frame.lineno)
88 )
84 else:
89 else:
85 print('%s: error importing module: <%s> %s (line %d)' % (
90 print(
86 f, type(e).__name__, e, frame.lineno))
91 '%s: error importing module: <%s> %s (line %d)'
92 % (f, type(e).__name__, e, frame.lineno)
93 )
94
87
95
88 if __name__ == '__main__':
96 if __name__ == '__main__':
89 if sys.version_info[0] == 2:
97 if sys.version_info[0] == 2:
90 fn = check_compat_py2
98 fn = check_compat_py2
91 else:
99 else:
92 fn = check_compat_py3
100 fn = check_compat_py3
93
101
94 for f in sys.argv[1:]:
102 for f in sys.argv[1:]:
95 with warnings.catch_warnings(record=True) as warns:
103 with warnings.catch_warnings(record=True) as warns:
96 fn(f)
104 fn(f)
97
105
98 for w in warns:
106 for w in warns:
99 print(warnings.formatwarning(w.message, w.category,
107 print(
100 w.filename, w.lineno).rstrip())
108 warnings.formatwarning(
109 w.message, w.category, w.filename, w.lineno
110 ).rstrip()
111 )
101
112
102 sys.exit(0)
113 sys.exit(0)
@@ -1,99 +1,102 b''
1 # Files that just need to be migrated to the formatter.
1 # Files that just need to be migrated to the formatter.
2 # Do not add new files here!
2 # Do not add new files here!
3 mercurial/cext/dirs.c
4 mercurial/cext/manifest.c
3 mercurial/cext/manifest.c
5 mercurial/cext/osutil.c
4 mercurial/cext/osutil.c
6 # Vendored code that we should never format:
5 # Vendored code that we should never format:
7 contrib/python-zstandard/c-ext/bufferutil.c
6 contrib/python-zstandard/c-ext/bufferutil.c
8 contrib/python-zstandard/c-ext/compressionchunker.c
7 contrib/python-zstandard/c-ext/compressionchunker.c
9 contrib/python-zstandard/c-ext/compressiondict.c
8 contrib/python-zstandard/c-ext/compressiondict.c
10 contrib/python-zstandard/c-ext/compressionparams.c
9 contrib/python-zstandard/c-ext/compressionparams.c
11 contrib/python-zstandard/c-ext/compressionreader.c
10 contrib/python-zstandard/c-ext/compressionreader.c
12 contrib/python-zstandard/c-ext/compressionwriter.c
11 contrib/python-zstandard/c-ext/compressionwriter.c
13 contrib/python-zstandard/c-ext/compressobj.c
12 contrib/python-zstandard/c-ext/compressobj.c
14 contrib/python-zstandard/c-ext/compressor.c
13 contrib/python-zstandard/c-ext/compressor.c
15 contrib/python-zstandard/c-ext/compressoriterator.c
14 contrib/python-zstandard/c-ext/compressoriterator.c
16 contrib/python-zstandard/c-ext/constants.c
15 contrib/python-zstandard/c-ext/constants.c
17 contrib/python-zstandard/c-ext/decompressionreader.c
16 contrib/python-zstandard/c-ext/decompressionreader.c
18 contrib/python-zstandard/c-ext/decompressionwriter.c
17 contrib/python-zstandard/c-ext/decompressionwriter.c
19 contrib/python-zstandard/c-ext/decompressobj.c
18 contrib/python-zstandard/c-ext/decompressobj.c
20 contrib/python-zstandard/c-ext/decompressor.c
19 contrib/python-zstandard/c-ext/decompressor.c
21 contrib/python-zstandard/c-ext/decompressoriterator.c
20 contrib/python-zstandard/c-ext/decompressoriterator.c
22 contrib/python-zstandard/c-ext/frameparams.c
21 contrib/python-zstandard/c-ext/frameparams.c
23 contrib/python-zstandard/c-ext/python-zstandard.h
22 contrib/python-zstandard/c-ext/python-zstandard.h
24 contrib/python-zstandard/zstd.c
23 contrib/python-zstandard/zstd.c
25 contrib/python-zstandard/zstd/common/bitstream.h
24 contrib/python-zstandard/zstd/common/bitstream.h
26 contrib/python-zstandard/zstd/common/compiler.h
25 contrib/python-zstandard/zstd/common/compiler.h
27 contrib/python-zstandard/zstd/common/cpu.h
26 contrib/python-zstandard/zstd/common/cpu.h
28 contrib/python-zstandard/zstd/common/debug.c
27 contrib/python-zstandard/zstd/common/debug.c
29 contrib/python-zstandard/zstd/common/debug.h
28 contrib/python-zstandard/zstd/common/debug.h
30 contrib/python-zstandard/zstd/common/entropy_common.c
29 contrib/python-zstandard/zstd/common/entropy_common.c
31 contrib/python-zstandard/zstd/common/error_private.c
30 contrib/python-zstandard/zstd/common/error_private.c
32 contrib/python-zstandard/zstd/common/error_private.h
31 contrib/python-zstandard/zstd/common/error_private.h
33 contrib/python-zstandard/zstd/common/fse_decompress.c
32 contrib/python-zstandard/zstd/common/fse_decompress.c
34 contrib/python-zstandard/zstd/common/fse.h
33 contrib/python-zstandard/zstd/common/fse.h
35 contrib/python-zstandard/zstd/common/huf.h
34 contrib/python-zstandard/zstd/common/huf.h
36 contrib/python-zstandard/zstd/common/mem.h
35 contrib/python-zstandard/zstd/common/mem.h
37 contrib/python-zstandard/zstd/common/pool.c
36 contrib/python-zstandard/zstd/common/pool.c
38 contrib/python-zstandard/zstd/common/pool.h
37 contrib/python-zstandard/zstd/common/pool.h
39 contrib/python-zstandard/zstd/common/threading.c
38 contrib/python-zstandard/zstd/common/threading.c
40 contrib/python-zstandard/zstd/common/threading.h
39 contrib/python-zstandard/zstd/common/threading.h
41 contrib/python-zstandard/zstd/common/xxhash.c
40 contrib/python-zstandard/zstd/common/xxhash.c
42 contrib/python-zstandard/zstd/common/xxhash.h
41 contrib/python-zstandard/zstd/common/xxhash.h
43 contrib/python-zstandard/zstd/common/zstd_common.c
42 contrib/python-zstandard/zstd/common/zstd_common.c
44 contrib/python-zstandard/zstd/common/zstd_errors.h
43 contrib/python-zstandard/zstd/common/zstd_errors.h
45 contrib/python-zstandard/zstd/common/zstd_internal.h
44 contrib/python-zstandard/zstd/common/zstd_internal.h
46 contrib/python-zstandard/zstd/compress/fse_compress.c
45 contrib/python-zstandard/zstd/compress/fse_compress.c
47 contrib/python-zstandard/zstd/compress/hist.c
46 contrib/python-zstandard/zstd/compress/hist.c
48 contrib/python-zstandard/zstd/compress/hist.h
47 contrib/python-zstandard/zstd/compress/hist.h
49 contrib/python-zstandard/zstd/compress/huf_compress.c
48 contrib/python-zstandard/zstd/compress/huf_compress.c
50 contrib/python-zstandard/zstd/compress/zstd_compress.c
49 contrib/python-zstandard/zstd/compress/zstd_compress.c
51 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
50 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
51 contrib/python-zstandard/zstd/compress/zstd_compress_literals.c
52 contrib/python-zstandard/zstd/compress/zstd_compress_literals.h
53 contrib/python-zstandard/zstd/compress/zstd_compress_sequences.c
54 contrib/python-zstandard/zstd/compress/zstd_compress_sequences.h
52 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
55 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
53 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
56 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
54 contrib/python-zstandard/zstd/compress/zstd_fast.c
57 contrib/python-zstandard/zstd/compress/zstd_fast.c
55 contrib/python-zstandard/zstd/compress/zstd_fast.h
58 contrib/python-zstandard/zstd/compress/zstd_fast.h
56 contrib/python-zstandard/zstd/compress/zstd_lazy.c
59 contrib/python-zstandard/zstd/compress/zstd_lazy.c
57 contrib/python-zstandard/zstd/compress/zstd_lazy.h
60 contrib/python-zstandard/zstd/compress/zstd_lazy.h
58 contrib/python-zstandard/zstd/compress/zstd_ldm.c
61 contrib/python-zstandard/zstd/compress/zstd_ldm.c
59 contrib/python-zstandard/zstd/compress/zstd_ldm.h
62 contrib/python-zstandard/zstd/compress/zstd_ldm.h
60 contrib/python-zstandard/zstd/compress/zstdmt_compress.c
63 contrib/python-zstandard/zstd/compress/zstdmt_compress.c
61 contrib/python-zstandard/zstd/compress/zstdmt_compress.h
64 contrib/python-zstandard/zstd/compress/zstdmt_compress.h
62 contrib/python-zstandard/zstd/compress/zstd_opt.c
65 contrib/python-zstandard/zstd/compress/zstd_opt.c
63 contrib/python-zstandard/zstd/compress/zstd_opt.h
66 contrib/python-zstandard/zstd/compress/zstd_opt.h
64 contrib/python-zstandard/zstd/decompress/huf_decompress.c
67 contrib/python-zstandard/zstd/decompress/huf_decompress.c
65 contrib/python-zstandard/zstd/decompress/zstd_ddict.c
68 contrib/python-zstandard/zstd/decompress/zstd_ddict.c
66 contrib/python-zstandard/zstd/decompress/zstd_ddict.h
69 contrib/python-zstandard/zstd/decompress/zstd_ddict.h
67 contrib/python-zstandard/zstd/decompress/zstd_decompress_block.c
70 contrib/python-zstandard/zstd/decompress/zstd_decompress_block.c
68 contrib/python-zstandard/zstd/decompress/zstd_decompress_block.h
71 contrib/python-zstandard/zstd/decompress/zstd_decompress_block.h
69 contrib/python-zstandard/zstd/decompress/zstd_decompress_internal.h
72 contrib/python-zstandard/zstd/decompress/zstd_decompress_internal.h
70 contrib/python-zstandard/zstd/decompress/zstd_decompress.c
73 contrib/python-zstandard/zstd/decompress/zstd_decompress.c
71 contrib/python-zstandard/zstd/deprecated/zbuff_common.c
74 contrib/python-zstandard/zstd/deprecated/zbuff_common.c
72 contrib/python-zstandard/zstd/deprecated/zbuff_compress.c
75 contrib/python-zstandard/zstd/deprecated/zbuff_compress.c
73 contrib/python-zstandard/zstd/deprecated/zbuff_decompress.c
76 contrib/python-zstandard/zstd/deprecated/zbuff_decompress.c
74 contrib/python-zstandard/zstd/deprecated/zbuff.h
77 contrib/python-zstandard/zstd/deprecated/zbuff.h
75 contrib/python-zstandard/zstd/dictBuilder/cover.c
78 contrib/python-zstandard/zstd/dictBuilder/cover.c
76 contrib/python-zstandard/zstd/dictBuilder/cover.h
79 contrib/python-zstandard/zstd/dictBuilder/cover.h
77 contrib/python-zstandard/zstd/dictBuilder/divsufsort.c
80 contrib/python-zstandard/zstd/dictBuilder/divsufsort.c
78 contrib/python-zstandard/zstd/dictBuilder/divsufsort.h
81 contrib/python-zstandard/zstd/dictBuilder/divsufsort.h
79 contrib/python-zstandard/zstd/dictBuilder/fastcover.c
82 contrib/python-zstandard/zstd/dictBuilder/fastcover.c
80 contrib/python-zstandard/zstd/dictBuilder/zdict.c
83 contrib/python-zstandard/zstd/dictBuilder/zdict.c
81 contrib/python-zstandard/zstd/dictBuilder/zdict.h
84 contrib/python-zstandard/zstd/dictBuilder/zdict.h
82 contrib/python-zstandard/zstd/zstd.h
85 contrib/python-zstandard/zstd/zstd.h
83 hgext/fsmonitor/pywatchman/bser.c
86 hgext/fsmonitor/pywatchman/bser.c
84 mercurial/thirdparty/xdiff/xdiff.h
87 mercurial/thirdparty/xdiff/xdiff.h
85 mercurial/thirdparty/xdiff/xdiffi.c
88 mercurial/thirdparty/xdiff/xdiffi.c
86 mercurial/thirdparty/xdiff/xdiffi.h
89 mercurial/thirdparty/xdiff/xdiffi.h
87 mercurial/thirdparty/xdiff/xemit.c
90 mercurial/thirdparty/xdiff/xemit.c
88 mercurial/thirdparty/xdiff/xemit.h
91 mercurial/thirdparty/xdiff/xemit.h
89 mercurial/thirdparty/xdiff/xhistogram.c
92 mercurial/thirdparty/xdiff/xhistogram.c
90 mercurial/thirdparty/xdiff/xinclude.h
93 mercurial/thirdparty/xdiff/xinclude.h
91 mercurial/thirdparty/xdiff/xmacros.h
94 mercurial/thirdparty/xdiff/xmacros.h
92 mercurial/thirdparty/xdiff/xmerge.c
95 mercurial/thirdparty/xdiff/xmerge.c
93 mercurial/thirdparty/xdiff/xpatience.c
96 mercurial/thirdparty/xdiff/xpatience.c
94 mercurial/thirdparty/xdiff/xprepare.c
97 mercurial/thirdparty/xdiff/xprepare.c
95 mercurial/thirdparty/xdiff/xprepare.h
98 mercurial/thirdparty/xdiff/xprepare.h
96 mercurial/thirdparty/xdiff/xtypes.h
99 mercurial/thirdparty/xdiff/xtypes.h
97 mercurial/thirdparty/xdiff/xutils.c
100 mercurial/thirdparty/xdiff/xutils.c
98 mercurial/thirdparty/xdiff/xutils.h
101 mercurial/thirdparty/xdiff/xutils.h
99 mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c
102 mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c
@@ -1,49 +1,51 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # Dumps output generated by Mercurial's command server in a formatted style to a
3 # Dumps output generated by Mercurial's command server in a formatted style to a
4 # given file or stderr if '-' is specified. Output is also written in its raw
4 # given file or stderr if '-' is specified. Output is also written in its raw
5 # format to stdout.
5 # format to stdout.
6 #
6 #
7 # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
7 # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
8 # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
8 # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
9
9
10 from __future__ import absolute_import, print_function
10 from __future__ import absolute_import, print_function
11 import struct
11 import struct
12 import sys
12 import sys
13
13
14 if len(sys.argv) != 2:
14 if len(sys.argv) != 2:
15 print('usage: debugcmdserver.py FILE')
15 print('usage: debugcmdserver.py FILE')
16 sys.exit(1)
16 sys.exit(1)
17
17
18 outputfmt = '>cI'
18 outputfmt = '>cI'
19 outputfmtsize = struct.calcsize(outputfmt)
19 outputfmtsize = struct.calcsize(outputfmt)
20
20
21 if sys.argv[1] == '-':
21 if sys.argv[1] == '-':
22 log = sys.stderr
22 log = sys.stderr
23 else:
23 else:
24 log = open(sys.argv[1], 'a')
24 log = open(sys.argv[1], 'a')
25
25
26
26 def read(size):
27 def read(size):
27 data = sys.stdin.read(size)
28 data = sys.stdin.read(size)
28 if not data:
29 if not data:
29 raise EOFError
30 raise EOFError
30 sys.stdout.write(data)
31 sys.stdout.write(data)
31 sys.stdout.flush()
32 sys.stdout.flush()
32 return data
33 return data
33
34
35
34 try:
36 try:
35 while True:
37 while True:
36 header = read(outputfmtsize)
38 header = read(outputfmtsize)
37 channel, length = struct.unpack(outputfmt, header)
39 channel, length = struct.unpack(outputfmt, header)
38 log.write('%s, %-4d' % (channel, length))
40 log.write('%s, %-4d' % (channel, length))
39 if channel in 'IL':
41 if channel in 'IL':
40 log.write(' -> waiting for input\n')
42 log.write(' -> waiting for input\n')
41 else:
43 else:
42 data = read(length)
44 data = read(length)
43 log.write(' -> %r\n' % data)
45 log.write(' -> %r\n' % data)
44 log.flush()
46 log.flush()
45 except EOFError:
47 except EOFError:
46 pass
48 pass
47 finally:
49 finally:
48 if log != sys.stderr:
50 if log != sys.stderr:
49 log.close()
51 log.close()
@@ -1,62 +1,65 b''
1 # debugshell extension
1 # debugshell extension
2 """a python shell with repo, changelog & manifest objects"""
2 """a python shell with repo, changelog & manifest objects"""
3
3
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5 import code
5 import code
6 import mercurial
6 import mercurial
7 import sys
7 import sys
8 from mercurial import (
8 from mercurial import (
9 demandimport,
9 demandimport,
10 pycompat,
10 pycompat,
11 registrar,
11 registrar,
12 )
12 )
13
13
14 cmdtable = {}
14 cmdtable = {}
15 command = registrar.command(cmdtable)
15 command = registrar.command(cmdtable)
16
16
17
17 def pdb(ui, repo, msg, **opts):
18 def pdb(ui, repo, msg, **opts):
18 objects = {
19 objects = {
19 'mercurial': mercurial,
20 'mercurial': mercurial,
20 'repo': repo,
21 'repo': repo,
21 'cl': repo.changelog,
22 'cl': repo.changelog,
22 'mf': repo.manifestlog,
23 'mf': repo.manifestlog,
23 }
24 }
24
25
25 code.interact(msg, local=objects)
26 code.interact(msg, local=objects)
26
27
28
27 def ipdb(ui, repo, msg, **opts):
29 def ipdb(ui, repo, msg, **opts):
28 import IPython
30 import IPython
29
31
30 cl = repo.changelog
32 cl = repo.changelog
31 mf = repo.manifestlog
33 mf = repo.manifestlog
32 cl, mf # use variables to appease pyflakes
34 cl, mf # use variables to appease pyflakes
33
35
34 IPython.embed()
36 IPython.embed()
35
37
38
36 @command(b'debugshell|dbsh', [])
39 @command(b'debugshell|dbsh', [])
37 def debugshell(ui, repo, **opts):
40 def debugshell(ui, repo, **opts):
38 bannermsg = ("loaded repo : %s\n"
41 bannermsg = "loaded repo : %s\n" "using source: %s" % (
39 "using source: %s" % (pycompat.sysstr(repo.root),
42 pycompat.sysstr(repo.root),
40 mercurial.__path__[0]))
43 mercurial.__path__[0],
44 )
41
45
42 pdbmap = {
46 pdbmap = {'pdb': 'code', 'ipdb': 'IPython'}
43 'pdb' : 'code',
44 'ipdb' : 'IPython'
45 }
46
47
47 debugger = ui.config(b"ui", b"debugger")
48 debugger = ui.config(b"ui", b"debugger")
48 if not debugger:
49 if not debugger:
49 debugger = 'pdb'
50 debugger = 'pdb'
50 else:
51 else:
51 debugger = pycompat.sysstr(debugger)
52 debugger = pycompat.sysstr(debugger)
52
53
53 # if IPython doesn't exist, fallback to code.interact
54 # if IPython doesn't exist, fallback to code.interact
54 try:
55 try:
55 with demandimport.deactivated():
56 with demandimport.deactivated():
56 __import__(pdbmap[debugger])
57 __import__(pdbmap[debugger])
57 except ImportError:
58 except ImportError:
58 ui.warn((b"%s debugger specified but %s module was not found\n")
59 ui.warnnoi18n(
59 % (debugger, pdbmap[debugger]))
60 b"%s debugger specified but %s module was not found\n"
61 % (debugger, pdbmap[debugger])
62 )
60 debugger = b'pdb'
63 debugger = b'pdb'
61
64
62 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
65 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
@@ -1,59 +1,65 b''
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
2 # dirstate's non-normal map
2 # dirstate's non-normal map
3 #
3 #
4 # For most operations on dirstate, this extensions checks that the nonnormalset
4 # For most operations on dirstate, this extensions checks that the nonnormalset
5 # contains the right entries.
5 # contains the right entries.
6 # It compares the nonnormal file to a nonnormalset built from the map of all
6 # It compares the nonnormal file to a nonnormalset built from the map of all
7 # the files in the dirstate to check that they contain the same files.
7 # the files in the dirstate to check that they contain the same files.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 from mercurial import (
11 from mercurial import (
12 dirstate,
12 dirstate,
13 extensions,
13 extensions,
14 )
14 )
15
15
16
16 def nonnormalentries(dmap):
17 def nonnormalentries(dmap):
17 """Compute nonnormal entries from dirstate's dmap"""
18 """Compute nonnormal entries from dirstate's dmap"""
18 res = set()
19 res = set()
19 for f, e in dmap.iteritems():
20 for f, e in dmap.iteritems():
20 if e[0] != b'n' or e[3] == -1:
21 if e[0] != b'n' or e[3] == -1:
21 res.add(f)
22 res.add(f)
22 return res
23 return res
23
24
25
24 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
26 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
25 """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
27 """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
26 nonnormalcomputedmap = nonnormalentries(dmap)
28 nonnormalcomputedmap = nonnormalentries(dmap)
27 if _nonnormalset != nonnormalcomputedmap:
29 if _nonnormalset != nonnormalcomputedmap:
28 ui.develwarn(b"%s call to %s\n" % (label, orig), config=b'dirstate')
30 ui.develwarn(b"%s call to %s\n" % (label, orig), config=b'dirstate')
29 ui.develwarn(b"inconsistency in nonnormalset\n", config=b'dirstate')
31 ui.develwarn(b"inconsistency in nonnormalset\n", config=b'dirstate')
30 ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate')
32 ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate')
31 ui.develwarn(b"[map] %s\n" % nonnormalcomputedmap, config=b'dirstate')
33 ui.develwarn(b"[map] %s\n" % nonnormalcomputedmap, config=b'dirstate')
32
34
35
33 def _checkdirstate(orig, self, arg):
36 def _checkdirstate(orig, self, arg):
34 """Check nonnormal set consistency before and after the call to orig"""
37 """Check nonnormal set consistency before and after the call to orig"""
35 checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
38 checkconsistency(
36 b"before")
39 self._ui, orig, self._map, self._map.nonnormalset, b"before"
40 )
37 r = orig(self, arg)
41 r = orig(self, arg)
38 checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
42 checkconsistency(
39 b"after")
43 self._ui, orig, self._map, self._map.nonnormalset, b"after"
44 )
40 return r
45 return r
41
46
47
42 def extsetup(ui):
48 def extsetup(ui):
43 """Wrap functions modifying dirstate to check nonnormalset consistency"""
49 """Wrap functions modifying dirstate to check nonnormalset consistency"""
44 dirstatecl = dirstate.dirstate
50 dirstatecl = dirstate.dirstate
45 devel = ui.configbool(b'devel', b'all-warnings')
51 devel = ui.configbool(b'devel', b'all-warnings')
46 paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck')
52 paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck')
47 if devel:
53 if devel:
48 extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate)
54 extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate)
49 if paranoid:
55 if paranoid:
50 # We don't do all these checks when paranoid is disable as it would
56 # We don't do all these checks when paranoid is disable as it would
51 # make the extension run very slowly on large repos
57 # make the extension run very slowly on large repos
52 extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate)
58 extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate)
53 extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate)
59 extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate)
54 extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate)
60 extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate)
55 extensions.wrapfunction(dirstatecl, 'write', _checkdirstate)
61 extensions.wrapfunction(dirstatecl, 'write', _checkdirstate)
56 extensions.wrapfunction(dirstatecl, 'add', _checkdirstate)
62 extensions.wrapfunction(dirstatecl, 'add', _checkdirstate)
57 extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate)
63 extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate)
58 extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate)
64 extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate)
59 extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate)
65 extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate)
@@ -1,43 +1,44 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # Dump revlogs as raw data stream
2 # Dump revlogs as raw data stream
3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
4
4
5 from __future__ import absolute_import, print_function
5 from __future__ import absolute_import, print_function
6
6
7 import sys
7 import sys
8 from mercurial import (
8 from mercurial import (
9 encoding,
9 encoding,
10 node,
10 node,
11 pycompat,
11 pycompat,
12 revlog,
12 revlog,
13 )
13 )
14 from mercurial.utils import (
14 from mercurial.utils import (
15 procutil,
15 procutil,
16 )
16 )
17
17
18 for fp in (sys.stdin, sys.stdout, sys.stderr):
18 for fp in (sys.stdin, sys.stdout, sys.stderr):
19 procutil.setbinary(fp)
19 procutil.setbinary(fp)
20
20
21 def binopen(path, mode=b'rb'):
21 def binopen(path, mode=b'rb'):
22 if b'b' not in mode:
22 if b'b' not in mode:
23 mode = mode + b'b'
23 mode = mode + b'b'
24 return open(path, pycompat.sysstr(mode))
24 return open(path, pycompat.sysstr(mode))
25 binopen.options = {}
25
26
26 def printb(data, end=b'\n'):
27 def printb(data, end=b'\n'):
27 sys.stdout.flush()
28 sys.stdout.flush()
28 pycompat.stdout.write(data + end)
29 pycompat.stdout.write(data + end)
29
30
30 for f in sys.argv[1:]:
31 for f in sys.argv[1:]:
31 r = revlog.revlog(binopen, encoding.strtolocal(f))
32 r = revlog.revlog(binopen, encoding.strtolocal(f))
32 print("file:", f)
33 print("file:", f)
33 for i in r:
34 for i in r:
34 n = r.node(i)
35 n = r.node(i)
35 p = r.parents(n)
36 p = r.parents(n)
36 d = r.revision(n)
37 d = r.revision(n)
37 printb(b"node: %s" % node.hex(n))
38 printb(b"node: %s" % node.hex(n))
38 printb(b"linkrev: %d" % r.linkrev(i))
39 printb(b"linkrev: %d" % r.linkrev(i))
39 printb(b"parents: %s %s" % (node.hex(p[0]), node.hex(p[1])))
40 printb(b"parents: %s %s" % (node.hex(p[0]), node.hex(p[1])))
40 printb(b"length: %d" % len(d))
41 printb(b"length: %d" % len(d))
41 printb(b"-start-")
42 printb(b"-start-")
42 printb(d)
43 printb(d)
43 printb(b"-end-")
44 printb(b"-end-")
@@ -1,160 +1,184 b''
1 CC = clang
1 CC = clang
2 CXX = clang++
2 CXX = clang++
3
3
4 all: bdiff mpatch xdiff
4 all: bdiff mpatch xdiff
5
5
6 fuzzutil.o: fuzzutil.cc fuzzutil.h
6 fuzzutil.o: fuzzutil.cc fuzzutil.h
7 $(CXX) $(CXXFLAGS) -g -O1 \
7 $(CXX) $(CXXFLAGS) -g -O1 \
8 -std=c++17 \
8 -std=c++17 \
9 -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
9 -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
10
10
11 fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
11 fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
12 $(CXX) $(CXXFLAGS) -std=c++17 \
12 $(CXX) $(CXXFLAGS) -std=c++17 \
13 -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
13 -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
14
14
15 pyutil.o: pyutil.cc pyutil.h
15 pyutil.o: pyutil.cc pyutil.h
16 $(CXX) $(CXXFLAGS) -g -O1 \
16 $(CXX) $(CXXFLAGS) -g -O1 \
17 `$$OUT/sanpy/bin/python-config --cflags` \
17 `$$OUT/sanpy/bin/python-config --cflags` \
18 -I../../mercurial -c -o pyutil.o pyutil.cc
18 -I../../mercurial -c -o pyutil.o pyutil.cc
19
19
20 bdiff.o: ../../mercurial/bdiff.c
20 bdiff.o: ../../mercurial/bdiff.c
21 $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
21 $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
22 ../../mercurial/bdiff.c
22 ../../mercurial/bdiff.c
23
23
24 bdiff: bdiff.cc bdiff.o fuzzutil.o
24 bdiff: bdiff.cc bdiff.o fuzzutil.o
25 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
25 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
26 -std=c++17 \
26 -std=c++17 \
27 -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
27 -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
28
28
29 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
29 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
30 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
30 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
31
31
32 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
32 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
33 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
33 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
34 bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
34 bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
35 $$OUT/bdiff_fuzzer
35 $$OUT/bdiff_fuzzer
36
36
37 mpatch.o: ../../mercurial/mpatch.c
37 mpatch.o: ../../mercurial/mpatch.c
38 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
38 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
39 ../../mercurial/mpatch.c
39 ../../mercurial/mpatch.c
40
40
41 mpatch: CXXFLAGS += -std=c++17
41 mpatch: CXXFLAGS += -std=c++17
42 mpatch: mpatch.cc mpatch.o fuzzutil.o
42 mpatch: mpatch.cc mpatch.o fuzzutil.o
43 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
43 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
44 -I../../mercurial mpatch.cc mpatch.o fuzzutil.o -o mpatch
44 -I../../mercurial mpatch.cc mpatch.o fuzzutil.o -o mpatch
45
45
46 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
46 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
47 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
47 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
48
48
49 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o
49 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o
50 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
50 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
51 mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
51 mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
52 $$OUT/mpatch_fuzzer
52 $$OUT/mpatch_fuzzer
53
53
54 mpatch_corpus.zip:
54 mpatch_corpus.zip:
55 python mpatch_corpus.py $$OUT/mpatch_fuzzer_seed_corpus.zip
55 python mpatch_corpus.py $$OUT/mpatch_fuzzer_seed_corpus.zip
56
56
57 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
57 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
58 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
58 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
59 -o $@ \
59 -o $@ \
60 $<
60 $<
61
61
62 xdiff: CXXFLAGS += -std=c++17
62 xdiff: CXXFLAGS += -std=c++17
63 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
63 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
64 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
64 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
65 -I../../mercurial xdiff.cc \
65 -I../../mercurial xdiff.cc \
66 xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
66 xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
67
67
68 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
68 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
69 $(CC) $(CFLAGS) -c \
69 $(CC) $(CFLAGS) -c \
70 -o $@ \
70 -o $@ \
71 $<
71 $<
72
72
73 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
73 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
74 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
74 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
75 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
75 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
76 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
76 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
77
77
78 manifest.o: ../../mercurial/cext/manifest.c
78 manifest.o: ../../mercurial/cext/manifest.c
79 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
79 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
80 -I../../mercurial \
80 -I../../mercurial \
81 -c -o manifest.o ../../mercurial/cext/manifest.c
81 -c -o manifest.o ../../mercurial/cext/manifest.c
82
82
83 charencode.o: ../../mercurial/cext/charencode.c
83 charencode.o: ../../mercurial/cext/charencode.c
84 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
84 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
85 -I../../mercurial \
85 -I../../mercurial \
86 -c -o charencode.o ../../mercurial/cext/charencode.c
86 -c -o charencode.o ../../mercurial/cext/charencode.c
87
87
88 parsers.o: ../../mercurial/cext/parsers.c
88 parsers.o: ../../mercurial/cext/parsers.c
89 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
89 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
90 -I../../mercurial \
90 -I../../mercurial \
91 -c -o parsers.o ../../mercurial/cext/parsers.c
91 -c -o parsers.o ../../mercurial/cext/parsers.c
92
92
93 dirs.o: ../../mercurial/cext/dirs.c
93 dirs.o: ../../mercurial/cext/dirs.c
94 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
94 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
95 -I../../mercurial \
95 -I../../mercurial \
96 -c -o dirs.o ../../mercurial/cext/dirs.c
96 -c -o dirs.o ../../mercurial/cext/dirs.c
97
97
98 pathencode.o: ../../mercurial/cext/pathencode.c
98 pathencode.o: ../../mercurial/cext/pathencode.c
99 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
99 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
100 -I../../mercurial \
100 -I../../mercurial \
101 -c -o pathencode.o ../../mercurial/cext/pathencode.c
101 -c -o pathencode.o ../../mercurial/cext/pathencode.c
102
102
103 revlog.o: ../../mercurial/cext/revlog.c
103 revlog.o: ../../mercurial/cext/revlog.c
104 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
104 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
105 -I../../mercurial \
105 -I../../mercurial \
106 -c -o revlog.o ../../mercurial/cext/revlog.c
106 -c -o revlog.o ../../mercurial/cext/revlog.c
107
107
108 dirs_fuzzer: dirs.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
109 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
110 -Wno-register -Wno-macro-redefined \
111 -I../../mercurial dirs.cc \
112 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
113 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
114 -o $$OUT/dirs_fuzzer
115
116 fncache_fuzzer: fncache.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
117 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
118 -Wno-register -Wno-macro-redefined \
119 -I../../mercurial fncache.cc \
120 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
121 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
122 -o $$OUT/fncache_fuzzer
123
124 jsonescapeu8fast_fuzzer: jsonescapeu8fast.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
125 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
126 -Wno-register -Wno-macro-redefined \
127 -I../../mercurial jsonescapeu8fast.cc \
128 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
129 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
130 -o $$OUT/jsonescapeu8fast_fuzzer
131
132 manifest_corpus.zip:
133 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
134
108 manifest_fuzzer: manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
135 manifest_fuzzer: manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
109 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
136 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
110 -Wno-register -Wno-macro-redefined \
137 -Wno-register -Wno-macro-redefined \
111 -I../../mercurial manifest.cc \
138 -I../../mercurial manifest.cc \
112 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
139 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
113 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
140 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
114 -o $$OUT/manifest_fuzzer
141 -o $$OUT/manifest_fuzzer
115
142
116 manifest_corpus.zip:
117 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
118
119 revlog_fuzzer: revlog.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
143 revlog_fuzzer: revlog.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
120 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
144 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
121 -Wno-register -Wno-macro-redefined \
145 -Wno-register -Wno-macro-redefined \
122 -I../../mercurial revlog.cc \
146 -I../../mercurial revlog.cc \
123 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
147 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
124 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
148 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
125 -o $$OUT/revlog_fuzzer
149 -o $$OUT/revlog_fuzzer
126
150
127 revlog_corpus.zip:
151 revlog_corpus.zip:
128 python revlog_corpus.py $$OUT/revlog_fuzzer_seed_corpus.zip
152 python revlog_corpus.py $$OUT/revlog_fuzzer_seed_corpus.zip
129
153
130 dirstate_fuzzer: dirstate.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
154 dirstate_fuzzer: dirstate.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
131 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
155 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
132 -Wno-register -Wno-macro-redefined \
156 -Wno-register -Wno-macro-redefined \
133 -I../../mercurial dirstate.cc \
157 -I../../mercurial dirstate.cc \
134 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
158 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
135 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
159 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
136 -o $$OUT/dirstate_fuzzer
160 -o $$OUT/dirstate_fuzzer
137
161
138 dirstate_corpus.zip:
162 dirstate_corpus.zip:
139 python dirstate_corpus.py $$OUT/dirstate_fuzzer_seed_corpus.zip
163 python dirstate_corpus.py $$OUT/dirstate_fuzzer_seed_corpus.zip
140
164
141 fm1readmarkers_fuzzer: fm1readmarkers.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
165 fm1readmarkers_fuzzer: fm1readmarkers.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
142 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
166 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
143 -Wno-register -Wno-macro-redefined \
167 -Wno-register -Wno-macro-redefined \
144 -I../../mercurial fm1readmarkers.cc \
168 -I../../mercurial fm1readmarkers.cc \
145 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
169 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
146 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
170 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
147 -o $$OUT/fm1readmarkers_fuzzer
171 -o $$OUT/fm1readmarkers_fuzzer
148
172
149 fm1readmarkers_corpus.zip:
173 fm1readmarkers_corpus.zip:
150 python fm1readmarkers_corpus.py $$OUT/fm1readmarkers_fuzzer_seed_corpus.zip
174 python fm1readmarkers_corpus.py $$OUT/fm1readmarkers_fuzzer_seed_corpus.zip
151
175
152 clean:
176 clean:
153 $(RM) *.o *_fuzzer \
177 $(RM) *.o *_fuzzer \
154 bdiff \
178 bdiff \
155 mpatch \
179 mpatch \
156 xdiff
180 xdiff
157
181
158 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer manifest_fuzzer manifest_corpus.zip revlog_fuzzer revlog_corpus.zip dirstate_fuzzer dirstate_corpus.zip fm1readmarkers_fuzzer fm1readmarkers_corpus.zip
182 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer dirs_fuzzer fncache_fuzzer jsonescapeu8fast_fuzzer manifest_fuzzer manifest_corpus.zip revlog_fuzzer revlog_corpus.zip dirstate_fuzzer dirstate_corpus.zip fm1readmarkers_fuzzer fm1readmarkers_corpus.zip
159
183
160 .PHONY: all clean oss-fuzz
184 .PHONY: all clean oss-fuzz
@@ -1,18 +1,17 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import argparse
3 import argparse
4 import os
4 import os
5 import zipfile
5 import zipfile
6
6
7 ap = argparse.ArgumentParser()
7 ap = argparse.ArgumentParser()
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
9 args = ap.parse_args()
9 args = ap.parse_args()
10
10
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
12 '..', '..'))
13 dirstate = os.path.join(reporoot, '.hg', 'dirstate')
12 dirstate = os.path.join(reporoot, '.hg', 'dirstate')
14
13
15 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
14 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
16 if os.path.exists(dirstate):
15 if os.path.exists(dirstate):
17 with open(dirstate) as f:
16 with open(dirstate) as f:
18 zf.writestr("dirstate", f.read())
17 zf.writestr("dirstate", f.read())
@@ -1,36 +1,38 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import argparse
3 import argparse
4 import zipfile
4 import zipfile
5
5
6 ap = argparse.ArgumentParser()
6 ap = argparse.ArgumentParser()
7 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
7 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 args = ap.parse_args()
8 args = ap.parse_args()
9
9
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
11 zf.writestr(
11 zf.writestr(
12 'smallish_obsstore',
12 'smallish_obsstore',
13 (
13 (
14 # header: fm1readmarkers should start at offset 1, and
14 # header: fm1readmarkers should start at offset 1, and
15 # read until byte 597.
15 # read until byte 597.
16 '1\x00597\x00'
16 '1\x00597\x00'
17 # body of obsstore file
17 # body of obsstore file
18 '\x01\x00\x00\x00vA\xd7\x02+C\x1a<)\x01,\x00\x00\x01\x03\x03\xe6'
18 '\x01\x00\x00\x00vA\xd7\x02+C\x1a<)\x01,\x00\x00\x01\x03\x03\xe6'
19 '\x92\xde)x\x16\xd1Xph\xc7\xa7[\xe5\xe2\x1a\xab\x1e6e\xaf\xc2\xae'
19 '\x92\xde)x\x16\xd1Xph\xc7\xa7[\xe5\xe2\x1a\xab\x1e6e\xaf\xc2\xae'
20 '\xe7\xbc\x83\xe1\x88\xa5\xda\xce>O\xbd\x04\xe9\x03\xc4o\xeb\x03'
20 '\xe7\xbc\x83\xe1\x88\xa5\xda\xce>O\xbd\x04\xe9\x03\xc4o\xeb\x03'
21 '\x01\t\x05\x04\x1fef18operationamenduserAugie Fackler <raf@duri'
21 '\x01\t\x05\x04\x1fef18operationamenduserAugie Fackler <raf@duri'
22 'n42.com>\x00\x00\x00vA\xd7\x02-\x8aD\xaf-\x01,\x00\x00\x01\x03\x03'
22 'n42.com>\x00\x00\x00vA\xd7\x02-\x8aD\xaf-\x01,\x00\x00\x01\x03\x03'
23 '\x17*\xca\x8f\x9e}i\xe0i\xbb\xdf\x9fb\x03\xd2XG?\xd3h\x98\x89\x1a'
23 '\x17*\xca\x8f\x9e}i\xe0i\xbb\xdf\x9fb\x03\xd2XG?\xd3h\x98\x89\x1a'
24 '=2\xeb\xc3\xc5<\xb3\x9e\xcc\x0e;#\xee\xc3\x10ux\x03\x01\t\x05\x04'
24 '=2\xeb\xc3\xc5<\xb3\x9e\xcc\x0e;#\xee\xc3\x10ux\x03\x01\t\x05\x04'
25 '\x1fef18operationamenduserAugie Fackler <raf@durin42.com>\x00\x00'
25 '\x1fef18operationamenduserAugie Fackler <raf@durin42.com>\x00\x00'
26 '\x00vA\xd7\x02Mn\xd9%\xea\x01,\x00\x00\x01\x03\x03\x98\x89\x1a='
26 '\x00vA\xd7\x02Mn\xd9%\xea\x01,\x00\x00\x01\x03\x03\x98\x89\x1a='
27 '2\xeb\xc3\xc5<\xb3\x9e\xcc\x0e;#\xee\xc3\x10ux\xe0*\xcaT\x86Z8J'
27 '2\xeb\xc3\xc5<\xb3\x9e\xcc\x0e;#\xee\xc3\x10ux\xe0*\xcaT\x86Z8J'
28 '\x85)\x97\xff7\xcc)\xc1\x7f\x19\x0c\x01\x03\x01\t\x05\x04\x1fef'
28 '\x85)\x97\xff7\xcc)\xc1\x7f\x19\x0c\x01\x03\x01\t\x05\x04\x1fef'
29 '18operationamenduserAugie Fackler <raf@durin42.com>\x00\x00\x00'
29 '18operationamenduserAugie Fackler <raf@durin42.com>\x00\x00\x00'
30 'yA\xd7\x02MtA\xbfj\x01,\x00\x00\x01\x03\x03\xe0*\xcaT\x86Z8J\x85'
30 'yA\xd7\x02MtA\xbfj\x01,\x00\x00\x01\x03\x03\xe0*\xcaT\x86Z8J\x85'
31 ')\x97\xff7\xcc)\xc1\x7f\x19\x0c\x01\x00\x94\x01\xa9\n\xf80\x92\xa3'
31 ')\x97\xff7\xcc)\xc1\x7f\x19\x0c\x01\x00\x94\x01\xa9\n\xf80\x92\xa3'
32 'j\xc5X\xb1\xc9:\xd51\xb8*\xa9\x03\x01\t\x08\x04\x1fef11operatio'
32 'j\xc5X\xb1\xc9:\xd51\xb8*\xa9\x03\x01\t\x08\x04\x1fef11operatio'
33 'nhistedituserAugie Fackler <raf@durin42.com>\x00\x00\x00yA\xd7\x02'
33 'nhistedituserAugie Fackler <raf@durin42.com>\x00\x00\x00yA\xd7\x02'
34 'MtA\xd4\xe1\x01,\x00\x00\x01\x03\x03"\xa5\xcb\x86\xb6\xf4\xbaO\xa0'
34 'MtA\xd4\xe1\x01,\x00\x00\x01\x03\x03"\xa5\xcb\x86\xb6\xf4\xbaO\xa0'
35 'sH\xe7?\xcb\x9b\xc2n\xcfI\x9e\x14\xf0D\xf0!\x18DN\xcd\x97\x016\xa5'
35 'sH\xe7?\xcb\x9b\xc2n\xcfI\x9e\x14\xf0D\xf0!\x18DN\xcd\x97\x016\xa5'
36 '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'))
36 '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'
37 ),
38 )
@@ -1,30 +1,33 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import argparse
3 import argparse
4 import zipfile
4 import zipfile
5
5
6 ap = argparse.ArgumentParser()
6 ap = argparse.ArgumentParser()
7 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
7 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 args = ap.parse_args()
8 args = ap.parse_args()
9
9
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
11 zf.writestr("manifest_zero",
11 zf.writestr(
12 '''PKG-INFO\09b3ed8f2b81095a13064402e930565f083346e9a
12 "manifest_zero",
13 '''PKG-INFO\09b3ed8f2b81095a13064402e930565f083346e9a
13 README\080b6e76643dcb44d4bc729e932fc464b3e36dbe3
14 README\080b6e76643dcb44d4bc729e932fc464b3e36dbe3
14 hg\0b6444347c629cc058d478023905cfb83b7f5bb9d
15 hg\0b6444347c629cc058d478023905cfb83b7f5bb9d
15 mercurial/__init__.py\0b80de5d138758541c5f05265ad144ab9fa86d1db
16 mercurial/__init__.py\0b80de5d138758541c5f05265ad144ab9fa86d1db
16 mercurial/byterange.py\017f5a9fbd99622f31a392c33ac1e903925dc80ed
17 mercurial/byterange.py\017f5a9fbd99622f31a392c33ac1e903925dc80ed
17 mercurial/fancyopts.py\0b6f52e23e356748c5039313d8b639cda16bf67ba
18 mercurial/fancyopts.py\0b6f52e23e356748c5039313d8b639cda16bf67ba
18 mercurial/hg.py\023cc12f225f1b42f32dc0d897a4f95a38ddc8f4a
19 mercurial/hg.py\023cc12f225f1b42f32dc0d897a4f95a38ddc8f4a
19 mercurial/mdiff.py\0a05f65c44bfbeec6a42336cd2ff0b30217899ca3
20 mercurial/mdiff.py\0a05f65c44bfbeec6a42336cd2ff0b30217899ca3
20 mercurial/revlog.py\0217bc3fde6d82c0210cf56aeae11d05a03f35b2b
21 mercurial/revlog.py\0217bc3fde6d82c0210cf56aeae11d05a03f35b2b
21 mercurial/transaction.py\09d180df101dc14ce3dd582fd998b36c98b3e39aa
22 mercurial/transaction.py\09d180df101dc14ce3dd582fd998b36c98b3e39aa
22 notes.txt\0703afcec5edb749cf5cec67831f554d6da13f2fb
23 notes.txt\0703afcec5edb749cf5cec67831f554d6da13f2fb
23 setup.py\0ccf3f6daf0f13101ca73631f7a1769e328b472c9
24 setup.py\0ccf3f6daf0f13101ca73631f7a1769e328b472c9
24 tkmerge\03c922edb43a9c143682f7bc7b00f98b3c756ebe7
25 tkmerge\03c922edb43a9c143682f7bc7b00f98b3c756ebe7
25 ''')
26 ''',
26 zf.writestr("badmanifest_shorthashes",
27 )
27 "narf\0aa\nnarf2\0aaa\n")
28 zf.writestr("badmanifest_shorthashes", "narf\0aa\nnarf2\0aaa\n")
28 zf.writestr("badmanifest_nonull",
29 zf.writestr(
29 "narf\0cccccccccccccccccccccccccccccccccccccccc\n"
30 "badmanifest_nonull",
30 "narf2aaaaaaaaaaaaaaaaaaaa\n")
31 "narf\0cccccccccccccccccccccccccccccccccccccccc\n"
32 "narf2aaaaaaaaaaaaaaaaaaaa\n",
33 )
@@ -1,345 +1,350 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import argparse
3 import argparse
4 import struct
4 import struct
5 import zipfile
5 import zipfile
6
6
7 from mercurial import (
7 from mercurial import (
8 hg,
8 hg,
9 ui as uimod,
9 ui as uimod,
10 )
10 )
11
11
12 ap = argparse.ArgumentParser()
12 ap = argparse.ArgumentParser()
13 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
13 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
14 args = ap.parse_args()
14 args = ap.parse_args()
15
15
16
16 class deltafrag(object):
17 class deltafrag(object):
17 def __init__(self, start, end, data):
18 def __init__(self, start, end, data):
18 self.start = start
19 self.start = start
19 self.end = end
20 self.end = end
20 self.data = data
21 self.data = data
21
22
22 def __str__(self):
23 def __str__(self):
23 return struct.pack(
24 return (
24 ">lll", self.start, self.end, len(self.data)) + self.data
25 struct.pack(">lll", self.start, self.end, len(self.data))
26 + self.data
27 )
28
25
29
26 class delta(object):
30 class delta(object):
27 def __init__(self, frags):
31 def __init__(self, frags):
28 self.frags = frags
32 self.frags = frags
29
33
30 def __str__(self):
34 def __str__(self):
31 return ''.join(str(f) for f in self.frags)
35 return ''.join(str(f) for f in self.frags)
32
36
37
33 class corpus(object):
38 class corpus(object):
34
35 def __init__(self, base, deltas):
39 def __init__(self, base, deltas):
36 self.base = base
40 self.base = base
37 self.deltas = deltas
41 self.deltas = deltas
38
42
39 def __str__(self):
43 def __str__(self):
40 deltas = [str(d) for d in self.deltas]
44 deltas = [str(d) for d in self.deltas]
41 parts = (
45 parts = (
42 [
46 [
43 struct.pack(">B", len(deltas) + 1),
47 struct.pack(">B", len(deltas) + 1),
44 struct.pack(">H", len(self.base)),
48 struct.pack(">H", len(self.base)),
45 ]
49 ]
46 + [struct.pack(">H", len(d)) for d in deltas]
50 + [struct.pack(">H", len(d)) for d in deltas]
47 + [self.base]
51 + [self.base]
48 + deltas
52 + deltas
49 )
53 )
50 return "".join(parts)
54 return "".join(parts)
51
55
56
52 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
57 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
53 # Manually constructed entries
58 # Manually constructed entries
54 zf.writestr(
59 zf.writestr(
55 "one_delta_applies",
60 "one_delta_applies", str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
56 str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
57 )
61 )
58 zf.writestr(
62 zf.writestr(
59 "one_delta_starts_late",
63 "one_delta_starts_late",
60 str(corpus('a', [delta([deltafrag(3, 1, 'b')])]))
64 str(corpus('a', [delta([deltafrag(3, 1, 'b')])])),
61 )
65 )
62 zf.writestr(
66 zf.writestr(
63 "one_delta_ends_late",
67 "one_delta_ends_late",
64 str(corpus('a', [delta([deltafrag(0, 20, 'b')])]))
68 str(corpus('a', [delta([deltafrag(0, 20, 'b')])])),
65 )
69 )
66
70
67 try:
71 try:
68 # Generated from repo data
72 # Generated from repo data
69 r = hg.repository(uimod.ui(), '../..')
73 r = hg.repository(uimod.ui(), '../..')
70 fl = r.file('mercurial/manifest.py')
74 fl = r.file('mercurial/manifest.py')
71 rl = getattr(fl, '_revlog', fl)
75 rl = getattr(fl, '_revlog', fl)
72 bins = rl._chunks(rl._deltachain(10)[0])
76 bins = rl._chunks(rl._deltachain(10)[0])
73 zf.writestr('manifest_py_rev_10',
77 zf.writestr('manifest_py_rev_10', str(corpus(bins[0], bins[1:])))
74 str(corpus(bins[0], bins[1:])))
78 except: # skip this, so no re-raises
75 except: # skip this, so no re-raises
76 print('skipping seed file from repo data')
79 print('skipping seed file from repo data')
77 # Automatically discovered by running the fuzzer
80 # Automatically discovered by running the fuzzer
78 zf.writestr(
81 zf.writestr(
79 "mpatch_decode_old_overread", "\x02\x00\x00\x00\x02\x00\x00\x00"
82 "mpatch_decode_old_overread", "\x02\x00\x00\x00\x02\x00\x00\x00"
80 )
83 )
81 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
84 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
82 zf.writestr(
85 zf.writestr(
83 "mpatch_ossfuzz_getbe32_ubsan",
86 "mpatch_ossfuzz_getbe32_ubsan",
84 "\x02\x00\x00\x00\x0c \xff\xff\xff\xff ")
87 "\x02\x00\x00\x00\x0c \xff\xff\xff\xff ",
88 )
85 zf.writestr(
89 zf.writestr(
86 "mpatch_apply_over_memcpy",
90 "mpatch_apply_over_memcpy",
87 '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
91 '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
88 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
92 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
89 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00'
93 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00'
90 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
94 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
91 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
95 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
92 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
96 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
93 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
97 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
94 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
98 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
95 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
99 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
96 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
100 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
97 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
101 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
98 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
102 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
99 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
103 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
100 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
104 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
101 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
105 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
102 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
106 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
103 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
107 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
104 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
108 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
105 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
109 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
106 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
110 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
107 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
111 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
108 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
112 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
109 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
113 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
110 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
114 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
111 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
115 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
112 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
116 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
113 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
117 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
114 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
118 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
115 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
119 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
116 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
120 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
117 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00'
121 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00'
118 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
122 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
119 '\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00'
123 '\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00'
120 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
124 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
121 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
125 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
122 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
126 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
123 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
127 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
124 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
128 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
125 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
129 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
126 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
130 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
127 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
131 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
128 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
132 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
129 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
133 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
130 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
134 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
131 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
135 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
132 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
136 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
133 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
137 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
134 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
138 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
135 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
139 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
136 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
140 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
137 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
141 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
138 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
142 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
139 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
143 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
140 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
144 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
141 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
145 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
142 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
146 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
143 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
147 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
144 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
148 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
145 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
149 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
146 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
150 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
147 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
151 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
148 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
152 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
149 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
153 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
150 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
154 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
151 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
155 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
152 '\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00'
156 '\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00'
153 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
157 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
154 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
158 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
155 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
159 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
156 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
160 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
157 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
161 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
158 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
162 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
159 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
163 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
160 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
164 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
161 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
165 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
162 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
166 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
163 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
167 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
164 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
168 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
165 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
169 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
166 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
170 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
167 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
171 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
168 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
172 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
169 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
173 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
170 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
174 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
171 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
175 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
172 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
176 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
173 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
177 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
174 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
178 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
175 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
179 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
176 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
180 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
177 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18'
181 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18'
178 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
182 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
179 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
183 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
180 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
184 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
181 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
185 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
182 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
186 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
183 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
187 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
184 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
188 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
185 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
189 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
186 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
190 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
187 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
191 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
188 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
192 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
189 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
193 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
190 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
194 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
191 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
195 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
192 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
196 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
193 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
197 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
194 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
198 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
195 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
199 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
196 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
200 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
197 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
201 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
198 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
202 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
199 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
203 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
200 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
204 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
201 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
205 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
202 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
206 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
203 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
207 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
204 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
208 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
205 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
209 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
206 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
210 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
207 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
211 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
208 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
212 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
209 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
213 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
210 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
214 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
211 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
215 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
212 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
216 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
213 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
217 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
214 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
218 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
215 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
219 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
216 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
220 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
217 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
221 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
218 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
222 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
219 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
223 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
220 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
224 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
221 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
225 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
222 '\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
226 '\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
223 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
227 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
224 '\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00'
228 '\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00'
225 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
229 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
226 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
230 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
227 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
231 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
228 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
232 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
229 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
233 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
230 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
234 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
231 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
235 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
232 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
236 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
233 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
237 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
234 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
238 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
235 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
239 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
236 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
240 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
237 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
241 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
238 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
242 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
239 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
243 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
240 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
244 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
241 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
245 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
242 '\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
246 '\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
243 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
247 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
244 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
248 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
245 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
249 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
246 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
250 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
247 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
251 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
248 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
252 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
249 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
253 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
250 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
254 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
251 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
255 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
252 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
256 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
253 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
257 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
254 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
258 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
255 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
259 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
256 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
260 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
257 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
261 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
258 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
262 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
259 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
263 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
260 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
264 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
261 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
265 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
262 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
266 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
263 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
267 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
264 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
268 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
265 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
269 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
266 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
270 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
267 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
271 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
268 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
272 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
269 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
273 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
270 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
274 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
271 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
275 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
272 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
276 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
273 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
277 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
274 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
278 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
275 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
279 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
276 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
280 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
277 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
281 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
278 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
282 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
279 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
283 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
280 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
284 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
281 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
285 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
282 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
286 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
283 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
287 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
284 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
288 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
285 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
289 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
286 '\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
290 '\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
287 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
291 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
288 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00'
292 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00'
289 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
293 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
290 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
294 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
291 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
295 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
292 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
296 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
293 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
297 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
294 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
298 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
295 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
299 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
296 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
300 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
297 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
301 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
298 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
302 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
299 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
303 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
300 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
304 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
301 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
305 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
302 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
306 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
303 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
307 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
304 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
308 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
305 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
309 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
306 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
310 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
307 '\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00'
311 '\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00'
308 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
312 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
309 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
313 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
310 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
314 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
311 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
315 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
312 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
316 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
313 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
317 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
314 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
318 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
315 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
319 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
316 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
320 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
317 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
321 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
318 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
322 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
319 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
323 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
320 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
324 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
321 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
325 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
322 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00'
326 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00'
323 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
327 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
324 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
328 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
325 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
329 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
326 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
330 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
327 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00'
331 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00'
328 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
332 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
329 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
333 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
330 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
334 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
331 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
335 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
332 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
336 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
333 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
337 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
334 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
338 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
335 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
339 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
336 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
340 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
337 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
341 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
338 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
342 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
339 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
343 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
340 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
344 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
341 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
345 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
342 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
346 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
343 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
347 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
344 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
348 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
345 '\x00\x00\x00\x00')
349 '\x00\x00\x00\x00',
350 )
@@ -1,57 +1,62 b''
1 #include <Python.h>
1 #include <Python.h>
2 #include <assert.h>
2 #include <assert.h>
3 #include <stdlib.h>
3 #include <stdlib.h>
4 #include <unistd.h>
4 #include <unistd.h>
5
5
6 #include <string>
6 #include <string>
7
7
8 #include "pyutil.h"
8 #include "pyutil.h"
9
9
10 extern "C" {
10 extern "C" {
11
11
12 static PyCodeObject *code;
12 static PyCodeObject *code;
13
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
15 {
16 contrib::initpy(*argv[0]);
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PyCodeObject *)Py_CompileString(R"py(
18 from parsers import parse_index2
18 from parsers import parse_index2
19 for inline in (True, False):
19 for inline in (True, False):
20 try:
20 try:
21 index, cache = parse_index2(data, inline)
21 index, cache = parse_index2(data, inline)
22 index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
22 index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
23 index.stats()
24 index.findsnapshots({}, 0)
25 10 in index
23 for rev in range(len(index)):
26 for rev in range(len(index)):
27 index.reachableroots(0, [len(index)-1], [rev])
24 node = index[rev][7]
28 node = index[rev][7]
25 partial = index.shortest(node)
29 partial = index.shortest(node)
26 index.partialmatch(node[:partial])
30 index.partialmatch(node[:partial])
31 index.deltachain(rev, None, True)
27 except Exception as e:
32 except Exception as e:
28 pass
33 pass
29 # uncomment this print if you're editing this Python code
34 # uncomment this print if you're editing this Python code
30 # to debug failures.
35 # to debug failures.
31 # print e
36 # print e
32 )py",
37 )py",
33 "fuzzer", Py_file_input);
38 "fuzzer", Py_file_input);
34 return 0;
39 return 0;
35 }
40 }
36
41
37 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
42 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
38 {
43 {
39 // Don't allow fuzzer inputs larger than 60k, since we'll just bog
44 // Don't allow fuzzer inputs larger than 60k, since we'll just bog
40 // down and not accomplish much.
45 // down and not accomplish much.
41 if (Size > 60000) {
46 if (Size > 60000) {
42 return 0;
47 return 0;
43 }
48 }
44 PyObject *text =
49 PyObject *text =
45 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
50 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
46 PyObject *locals = PyDict_New();
51 PyObject *locals = PyDict_New();
47 PyDict_SetItemString(locals, "data", text);
52 PyDict_SetItemString(locals, "data", text);
48 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
53 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
49 if (!res) {
54 if (!res) {
50 PyErr_Print();
55 PyErr_Print();
51 }
56 }
52 Py_XDECREF(res);
57 Py_XDECREF(res);
53 Py_DECREF(locals);
58 Py_DECREF(locals);
54 Py_DECREF(text);
59 Py_DECREF(text);
55 return 0; // Non-zero return values are reserved for future use.
60 return 0; // Non-zero return values are reserved for future use.
56 }
61 }
57 }
62 }
@@ -1,28 +1,28 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import argparse
3 import argparse
4 import os
4 import os
5 import zipfile
5 import zipfile
6
6
7 ap = argparse.ArgumentParser()
7 ap = argparse.ArgumentParser()
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
9 args = ap.parse_args()
9 args = ap.parse_args()
10
10
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
12 '..', '..'))
13 # typically a standalone index
12 # typically a standalone index
14 changelog = os.path.join(reporoot, '.hg', 'store', '00changelog.i')
13 changelog = os.path.join(reporoot, '.hg', 'store', '00changelog.i')
15 # an inline revlog with only a few revisions
14 # an inline revlog with only a few revisions
16 contributing = os.path.join(
15 contributing = os.path.join(
17 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i')
16 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i'
17 )
18
18
19 print(changelog, os.path.exists(changelog))
19 print(changelog, os.path.exists(changelog))
20 print(contributing, os.path.exists(contributing))
20 print(contributing, os.path.exists(contributing))
21
21
22 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
22 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
23 if os.path.exists(changelog):
23 if os.path.exists(changelog):
24 with open(changelog) as f:
24 with open(changelog) as f:
25 zf.writestr("00changelog.i", f.read())
25 zf.writestr("00changelog.i", f.read())
26 if os.path.exists(contributing):
26 if os.path.exists(contributing):
27 with open(contributing) as f:
27 with open(contributing) as f:
28 zf.writestr("contributing.i", f.read())
28 zf.writestr("contributing.i", f.read())
@@ -1,130 +1,136 b''
1 #!/usr/bin/env python2
1 #!/usr/bin/env python2
2 from __future__ import absolute_import, print_function
2 from __future__ import absolute_import, print_function
3
3
4 import argparse
4 import argparse
5 import json
6 import os
5 import os
7 import subprocess
6 import subprocess
8 import sys
7 import sys
9
8
10 # Always load hg libraries from the hg we can find on $PATH.
9 # Always load hg libraries from the hg we can find on $PATH.
11 hglib = json.loads(subprocess.check_output(
10 hglib = subprocess.check_output(['hg', 'debuginstall', '-T', '{hgmodules}'])
12 ['hg', 'debuginstall', '-Tjson']))[0]['hgmodules']
13 sys.path.insert(0, os.path.dirname(hglib))
11 sys.path.insert(0, os.path.dirname(hglib))
14
12
15 from mercurial import util
13 from mercurial import util
16
14
17 ap = argparse.ArgumentParser()
15 ap = argparse.ArgumentParser()
18 ap.add_argument('--paranoid',
16 ap.add_argument(
19 action='store_true',
17 '--paranoid',
20 help=("Be paranoid about how version numbers compare and "
18 action='store_true',
21 "produce something that's more likely to sort "
19 help=(
22 "reasonably."))
20 "Be paranoid about how version numbers compare and "
21 "produce something that's more likely to sort "
22 "reasonably."
23 ),
24 )
23 ap.add_argument('--selftest', action='store_true', help='Run self-tests.')
25 ap.add_argument('--selftest', action='store_true', help='Run self-tests.')
24 ap.add_argument('versionfile', help='Path to a valid mercurial __version__.py')
26 ap.add_argument('versionfile', help='Path to a valid mercurial __version__.py')
25
27
28
26 def paranoidver(ver):
29 def paranoidver(ver):
27 """Given an hg version produce something that distutils can sort.
30 """Given an hg version produce something that distutils can sort.
28
31
29 Some Mac package management systems use distutils code in order to
32 Some Mac package management systems use distutils code in order to
30 figure out upgrades, which makes life difficult. The test case is
33 figure out upgrades, which makes life difficult. The test case is
31 a reduced version of code in the Munki tool used by some large
34 a reduced version of code in the Munki tool used by some large
32 organizations to centrally manage OS X packages, which is what
35 organizations to centrally manage OS X packages, which is what
33 inspired this kludge.
36 inspired this kludge.
34
37
35 >>> paranoidver('3.4')
38 >>> paranoidver('3.4')
36 '3.4.0'
39 '3.4.0'
37 >>> paranoidver('3.4.2')
40 >>> paranoidver('3.4.2')
38 '3.4.2'
41 '3.4.2'
39 >>> paranoidver('3.0-rc+10')
42 >>> paranoidver('3.0-rc+10')
40 '2.9.9999-rc+10'
43 '2.9.9999-rc+10'
41 >>> paranoidver('4.2+483-5d44d7d4076e')
44 >>> paranoidver('4.2+483-5d44d7d4076e')
42 '4.2.0+483-5d44d7d4076e'
45 '4.2.0+483-5d44d7d4076e'
43 >>> paranoidver('4.2.1+598-48d1e1214d8c')
46 >>> paranoidver('4.2.1+598-48d1e1214d8c')
44 '4.2.1+598-48d1e1214d8c'
47 '4.2.1+598-48d1e1214d8c'
45 >>> paranoidver('4.3-rc')
48 >>> paranoidver('4.3-rc')
46 '4.2.9999-rc'
49 '4.2.9999-rc'
47 >>> paranoidver('4.3')
50 >>> paranoidver('4.3')
48 '4.3.0'
51 '4.3.0'
49 >>> from distutils import version
52 >>> from distutils import version
50 >>> class LossyPaddedVersion(version.LooseVersion):
53 >>> class LossyPaddedVersion(version.LooseVersion):
51 ... '''Subclass version.LooseVersion to compare things like
54 ... '''Subclass version.LooseVersion to compare things like
52 ... "10.6" and "10.6.0" as equal'''
55 ... "10.6" and "10.6.0" as equal'''
53 ... def __init__(self, s):
56 ... def __init__(self, s):
54 ... self.parse(s)
57 ... self.parse(s)
55 ...
58 ...
56 ... def _pad(self, version_list, max_length):
59 ... def _pad(self, version_list, max_length):
57 ... 'Pad a version list by adding extra 0 components to the end'
60 ... 'Pad a version list by adding extra 0 components to the end'
58 ... # copy the version_list so we don't modify it
61 ... # copy the version_list so we don't modify it
59 ... cmp_list = list(version_list)
62 ... cmp_list = list(version_list)
60 ... while len(cmp_list) < max_length:
63 ... while len(cmp_list) < max_length:
61 ... cmp_list.append(0)
64 ... cmp_list.append(0)
62 ... return cmp_list
65 ... return cmp_list
63 ...
66 ...
64 ... def __cmp__(self, other):
67 ... def __cmp__(self, other):
65 ... if isinstance(other, str):
68 ... if isinstance(other, str):
66 ... other = MunkiLooseVersion(other)
69 ... other = MunkiLooseVersion(other)
67 ... max_length = max(len(self.version), len(other.version))
70 ... max_length = max(len(self.version), len(other.version))
68 ... self_cmp_version = self._pad(self.version, max_length)
71 ... self_cmp_version = self._pad(self.version, max_length)
69 ... other_cmp_version = self._pad(other.version, max_length)
72 ... other_cmp_version = self._pad(other.version, max_length)
70 ... return cmp(self_cmp_version, other_cmp_version)
73 ... return cmp(self_cmp_version, other_cmp_version)
71 >>> def testver(older, newer):
74 >>> def testver(older, newer):
72 ... o = LossyPaddedVersion(paranoidver(older))
75 ... o = LossyPaddedVersion(paranoidver(older))
73 ... n = LossyPaddedVersion(paranoidver(newer))
76 ... n = LossyPaddedVersion(paranoidver(newer))
74 ... return o < n
77 ... return o < n
75 >>> testver('3.4', '3.5')
78 >>> testver('3.4', '3.5')
76 True
79 True
77 >>> testver('3.4.0', '3.5-rc')
80 >>> testver('3.4.0', '3.5-rc')
78 True
81 True
79 >>> testver('3.4-rc', '3.5')
82 >>> testver('3.4-rc', '3.5')
80 True
83 True
81 >>> testver('3.4-rc+10-deadbeef', '3.5')
84 >>> testver('3.4-rc+10-deadbeef', '3.5')
82 True
85 True
83 >>> testver('3.4.2', '3.5-rc')
86 >>> testver('3.4.2', '3.5-rc')
84 True
87 True
85 >>> testver('3.4.2', '3.5-rc+10-deadbeef')
88 >>> testver('3.4.2', '3.5-rc+10-deadbeef')
86 True
89 True
87 >>> testver('4.2+483-5d44d7d4076e', '4.2.1+598-48d1e1214d8c')
90 >>> testver('4.2+483-5d44d7d4076e', '4.2.1+598-48d1e1214d8c')
88 True
91 True
89 >>> testver('4.3-rc', '4.3')
92 >>> testver('4.3-rc', '4.3')
90 True
93 True
91 >>> testver('4.3', '4.3-rc')
94 >>> testver('4.3', '4.3-rc')
92 False
95 False
93 """
96 """
94 major, minor, micro, extra = util.versiontuple(ver, n=4)
97 major, minor, micro, extra = util.versiontuple(ver, n=4)
95 if micro is None:
98 if micro is None:
96 micro = 0
99 micro = 0
97 if extra:
100 if extra:
98 if extra.startswith('rc'):
101 if extra.startswith('rc'):
99 if minor == 0:
102 if minor == 0:
100 major -= 1
103 major -= 1
101 minor = 9
104 minor = 9
102 else:
105 else:
103 minor -= 1
106 minor -= 1
104 micro = 9999
107 micro = 9999
105 extra = '-' + extra
108 extra = '-' + extra
106 else:
109 else:
107 extra = '+' + extra
110 extra = '+' + extra
108 else:
111 else:
109 extra = ''
112 extra = ''
110 return '%d.%d.%d%s' % (major, minor, micro, extra)
113 return '%d.%d.%d%s' % (major, minor, micro, extra)
111
114
115
112 def main(argv):
116 def main(argv):
113 opts = ap.parse_args(argv[1:])
117 opts = ap.parse_args(argv[1:])
114 if opts.selftest:
118 if opts.selftest:
115 import doctest
119 import doctest
120
116 doctest.testmod()
121 doctest.testmod()
117 return
122 return
118 with open(opts.versionfile) as f:
123 with open(opts.versionfile) as f:
119 for l in f:
124 for l in f:
120 if l.startswith('version = b'):
125 if l.startswith('version = b'):
121 # version number is entire line minus the quotes
126 # version number is entire line minus the quotes
122 ver = l[len('version = b') + 1:-2]
127 ver = l[len('version = b') + 1 : -2]
123 break
128 break
124 if opts.paranoid:
129 if opts.paranoid:
125 print(paranoidver(ver))
130 print(paranoidver(ver))
126 else:
131 else:
127 print(ver)
132 print(ver)
128
133
134
129 if __name__ == '__main__':
135 if __name__ == '__main__':
130 main(sys.argv)
136 main(sys.argv)
@@ -1,147 +1,163 b''
1 # A minimal client for Mercurial's command server
1 # A minimal client for Mercurial's command server
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4
4
5 import io
5 import io
6 import os
6 import os
7 import re
7 import re
8 import signal
8 import signal
9 import socket
9 import socket
10 import struct
10 import struct
11 import subprocess
11 import subprocess
12 import sys
12 import sys
13 import time
13 import time
14
14
15 if sys.version_info[0] >= 3:
15 if sys.version_info[0] >= 3:
16 stdout = sys.stdout.buffer
16 stdout = sys.stdout.buffer
17 stderr = sys.stderr.buffer
17 stderr = sys.stderr.buffer
18 stringio = io.BytesIO
18 stringio = io.BytesIO
19
19 def bprint(*args):
20 def bprint(*args):
20 # remove b'' as well for ease of test migration
21 # remove b'' as well for ease of test migration
21 pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
22 pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
22 stdout.write(b' '.join(pargs) + b'\n')
23 stdout.write(b' '.join(pargs) + b'\n')
24
25
23 else:
26 else:
24 import cStringIO
27 import cStringIO
28
25 stdout = sys.stdout
29 stdout = sys.stdout
26 stderr = sys.stderr
30 stderr = sys.stderr
27 stringio = cStringIO.StringIO
31 stringio = cStringIO.StringIO
28 bprint = print
32 bprint = print
29
33
34
30 def connectpipe(path=None, extraargs=()):
35 def connectpipe(path=None, extraargs=()):
31 cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
36 cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
32 if path:
37 if path:
33 cmdline += [b'-R', path]
38 cmdline += [b'-R', path]
34 cmdline.extend(extraargs)
39 cmdline.extend(extraargs)
35
40
36 def tonative(cmdline):
41 def tonative(cmdline):
37 if os.name != r'nt':
42 if os.name != r'nt':
38 return cmdline
43 return cmdline
39 return [arg.decode("utf-8") for arg in cmdline]
44 return [arg.decode("utf-8") for arg in cmdline]
40
45
41 server = subprocess.Popen(tonative(cmdline), stdin=subprocess.PIPE,
46 server = subprocess.Popen(
42 stdout=subprocess.PIPE)
47 tonative(cmdline), stdin=subprocess.PIPE, stdout=subprocess.PIPE
48 )
43
49
44 return server
50 return server
45
51
52
46 class unixconnection(object):
53 class unixconnection(object):
47 def __init__(self, sockpath):
54 def __init__(self, sockpath):
48 self.sock = sock = socket.socket(socket.AF_UNIX)
55 self.sock = sock = socket.socket(socket.AF_UNIX)
49 sock.connect(sockpath)
56 sock.connect(sockpath)
50 self.stdin = sock.makefile('wb')
57 self.stdin = sock.makefile('wb')
51 self.stdout = sock.makefile('rb')
58 self.stdout = sock.makefile('rb')
52
59
53 def wait(self):
60 def wait(self):
54 self.stdin.close()
61 self.stdin.close()
55 self.stdout.close()
62 self.stdout.close()
56 self.sock.close()
63 self.sock.close()
57
64
65
58 class unixserver(object):
66 class unixserver(object):
59 def __init__(self, sockpath, logpath=None, repopath=None):
67 def __init__(self, sockpath, logpath=None, repopath=None):
60 self.sockpath = sockpath
68 self.sockpath = sockpath
61 cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
69 cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
62 if repopath:
70 if repopath:
63 cmdline += [b'-R', repopath]
71 cmdline += [b'-R', repopath]
64 if logpath:
72 if logpath:
65 stdout = open(logpath, 'a')
73 stdout = open(logpath, 'a')
66 stderr = subprocess.STDOUT
74 stderr = subprocess.STDOUT
67 else:
75 else:
68 stdout = stderr = None
76 stdout = stderr = None
69 self.server = subprocess.Popen(cmdline, stdout=stdout, stderr=stderr)
77 self.server = subprocess.Popen(cmdline, stdout=stdout, stderr=stderr)
70 # wait for listen()
78 # wait for listen()
71 while self.server.poll() is None:
79 while self.server.poll() is None:
72 if os.path.exists(sockpath):
80 if os.path.exists(sockpath):
73 break
81 break
74 time.sleep(0.1)
82 time.sleep(0.1)
75
83
76 def connect(self):
84 def connect(self):
77 return unixconnection(self.sockpath)
85 return unixconnection(self.sockpath)
78
86
79 def shutdown(self):
87 def shutdown(self):
80 os.kill(self.server.pid, signal.SIGTERM)
88 os.kill(self.server.pid, signal.SIGTERM)
81 self.server.wait()
89 self.server.wait()
82
90
91
83 def writeblock(server, data):
92 def writeblock(server, data):
84 server.stdin.write(struct.pack(b'>I', len(data)))
93 server.stdin.write(struct.pack(b'>I', len(data)))
85 server.stdin.write(data)
94 server.stdin.write(data)
86 server.stdin.flush()
95 server.stdin.flush()
87
96
97
88 def readchannel(server):
98 def readchannel(server):
89 data = server.stdout.read(5)
99 data = server.stdout.read(5)
90 if not data:
100 if not data:
91 raise EOFError
101 raise EOFError
92 channel, length = struct.unpack('>cI', data)
102 channel, length = struct.unpack('>cI', data)
93 if channel in b'IL':
103 if channel in b'IL':
94 return channel, length
104 return channel, length
95 else:
105 else:
96 return channel, server.stdout.read(length)
106 return channel, server.stdout.read(length)
97
107
108
98 def sep(text):
109 def sep(text):
99 return text.replace(b'\\', b'/')
110 return text.replace(b'\\', b'/')
100
111
101 def runcommand(server, args, output=stdout, error=stderr, input=None,
112
102 outfilter=lambda x: x):
113 def runcommand(
114 server, args, output=stdout, error=stderr, input=None, outfilter=lambda x: x
115 ):
103 bprint(b'*** runcommand', b' '.join(args))
116 bprint(b'*** runcommand', b' '.join(args))
104 stdout.flush()
117 stdout.flush()
105 server.stdin.write(b'runcommand\n')
118 server.stdin.write(b'runcommand\n')
106 writeblock(server, b'\0'.join(args))
119 writeblock(server, b'\0'.join(args))
107
120
108 if not input:
121 if not input:
109 input = stringio()
122 input = stringio()
110
123
111 while True:
124 while True:
112 ch, data = readchannel(server)
125 ch, data = readchannel(server)
113 if ch == b'o':
126 if ch == b'o':
114 output.write(outfilter(data))
127 output.write(outfilter(data))
115 output.flush()
128 output.flush()
116 elif ch == b'e':
129 elif ch == b'e':
117 error.write(data)
130 error.write(data)
118 error.flush()
131 error.flush()
119 elif ch == b'I':
132 elif ch == b'I':
120 writeblock(server, input.read(data))
133 writeblock(server, input.read(data))
121 elif ch == b'L':
134 elif ch == b'L':
122 writeblock(server, input.readline(data))
135 writeblock(server, input.readline(data))
123 elif ch == b'm':
136 elif ch == b'm':
124 bprint(b"message: %r" % data)
137 bprint(b"message: %r" % data)
125 elif ch == b'r':
138 elif ch == b'r':
126 ret, = struct.unpack('>i', data)
139 (ret,) = struct.unpack('>i', data)
127 if ret != 0:
140 if ret != 0:
128 bprint(b' [%d]' % ret)
141 bprint(b' [%d]' % ret)
129 return ret
142 return ret
130 else:
143 else:
131 bprint(b"unexpected channel %c: %r" % (ch, data))
144 bprint(b"unexpected channel %c: %r" % (ch, data))
132 if ch.isupper():
145 if ch.isupper():
133 return
146 return
134
147
148
135 def check(func, connect=connectpipe):
149 def check(func, connect=connectpipe):
136 stdout.flush()
150 stdout.flush()
137 server = connect()
151 server = connect()
138 try:
152 try:
139 return func(server)
153 return func(server)
140 finally:
154 finally:
141 server.stdin.close()
155 server.stdin.close()
142 server.wait()
156 server.wait()
143
157
158
144 def checkwith(connect=connectpipe, **kwargs):
159 def checkwith(connect=connectpipe, **kwargs):
145 def wrap(func):
160 def wrap(func):
146 return check(func, lambda: connect(**kwargs))
161 return check(func, lambda: connect(**kwargs))
162
147 return wrap
163 return wrap
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file renamed from mercurial/repository.py to mercurial/interfaces/repository.py
NO CONTENT: file renamed from mercurial/repository.py to mercurial/interfaces/repository.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file renamed from mercurial/utils/interfaceutil.py to mercurial/interfaces/util.py
NO CONTENT: file renamed from mercurial/utils/interfaceutil.py to mercurial/interfaces/util.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file chmod 100755 => 100644
NO CONTENT: modified file chmod 100755 => 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/dirstate/dirs_multiset.rs
NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/dirstate/dirs_multiset.rs
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/parsers.rs
NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/parsers.rs
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now