##// END OF EJS Templates
merge default into stable for 5.2 release
Augie Fackler -
r43600:59338f95 merge 5.2rc0 stable
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,17 b''
# Configuration for the black code formatter (run via contrib/grey.py fork).
[tool.black]
# Mercurial's historical 80-column limit, not black's default of 88.
line-length = 80
# Paths to skip: build artifacts, VCS/tool caches, and vendored third-party code.
exclude = '''
build/
| wheelhouse/
| dist/
| packages/
| \.hg/
| \.mypy_cache/
| \.venv/
| mercurial/thirdparty/
| hgext/fsmonitor/pywatchman/
| contrib/python-zstandard/
| contrib/grey.py
'''
# Leave existing string quoting untouched.
skip-string-normalization = true
quiet = true
@@ -0,0 +1,21 b''
1 # pypi.py - Automation around PyPI
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 from twine.commands.upload import upload as twine_upload
11 from twine.settings import Settings
12
13
def upload(paths):
    """Upload files to PyPI.

    `paths` is an iterable of `pathlib.Path`.
    """
    # twine reads credentials / repository URL from its default sources.
    file_names = [str(path) for path in paths]
    twine_upload(Settings(), file_names)
@@ -0,0 +1,99 b''
1 # try_server.py - Interact with Try server
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 import base64
11 import json
12 import os
13 import subprocess
14 import tempfile
15
16 from .aws import AWSConnection
17
# Name of the AWS Lambda function invoked below to receive Try bundles.
LAMBDA_FUNCTION = "ci-try-server-upload"
19
20
def trigger_try(c: AWSConnection, rev="."):
    """Trigger a new Try run."""
    lambda_client = c.session.client("lambda")

    cset, bundle = generate_bundle(rev=rev)

    # The bundle is binary; base64 it so the payload is valid JSON.
    payload = {
        "bundle": base64.b64encode(bundle).decode("utf-8"),
        "node": cset["node"],
        "branch": cset["branch"],
        "user": cset["user"],
        "message": cset["desc"],
    }

    print("resolved revision:")
    for field in ("node", "branch", "user"):
        print("%s: %s" % (field, cset[field]))
    print("desc: %s" % cset["desc"].splitlines()[0])
    print()

    print("sending to Try...")
    response = lambda_client.invoke(
        FunctionName=LAMBDA_FUNCTION,
        InvocationType="RequestResponse",
        Payload=json.dumps(payload).encode("utf-8"),
    )

    # The Lambda returns a JSON list of status messages; relay each one.
    for line in json.load(response["Payload"]):
        print("remote: %s" % line)
52
53
def generate_bundle(rev="."):
    """Generate a bundle suitable for use by the Try service.

    Returns a tuple of revision metadata and raw Mercurial bundle data.
    """
    # `hg bundle` doesn't support streaming to stdout. So we use a temporary
    # file.
    tmp_path = None
    try:
        fd, tmp_path = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
        os.close(fd)

        print("generating bundle...")
        subprocess.run(
            [
                "hg",
                "bundle",
                "--type",
                "gzip-v2",
                "--base",
                "public()",
                "--rev",
                rev,
                tmp_path,
            ],
            check=True,
        )

        with open(tmp_path, "rb") as fh:
            bundle_data = fh.read()

    finally:
        # Always remove the temp file, even when bundling fails.
        if tmp_path:
            os.unlink(tmp_path)

    log_args = [
        "hg",
        "log",
        "-r",
        rev,
        # We have to upload as JSON, so it won't matter if we emit binary
        # since we need to normalize to UTF-8.
        "-T",
        "json",
    ]
    res = subprocess.run(log_args, check=True, capture_output=True)
    return json.loads(res.stdout)[0], bundle_data
@@ -0,0 +1,15 b''
[fix]
# C/C++ sources go through clang-format (style comes from .clang-format),
# except files listed in the ignorelist.
clang-format:command = clang-format --style file -i
clang-format:pattern = (**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"

# Rust sources are formatted with rustfmt.
rustfmt:command = rustfmt {rootpath}
rustfmt:pattern = set:**.rs

# We use black, but currently with
# https://github.com/psf/black/pull/826 applied. For now
# contrib/grey.py is our fork of black. You need to pip install
# git+https://github.com/python/black/@d9e71a75ccfefa3d9156a64c03313a0d4ad981e5
# to have the dependencies for grey.
#
# black:command = python3.7 contrib/grey.py --config=black.toml -
# black:pattern = set:**.py - hgext/fsmonitor/pywatchman/** - mercurial/thirdparty/** - "contrib/python-zstandard/** - contrib/grey.py"
@@ -0,0 +1,56 b''
#include <Python.h>
#include <assert.h>
#include <stdlib.h>
#include <unistd.h>

#include "pyutil.h"

#include <iostream>
#include <string>
9
10 extern "C" {
11
12 static PyCodeObject *code;
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
18 from parsers import dirs
19 try:
20 files = mdata.split('\n')
21 d = dirs(files)
22 list(d)
23 'a' in d
24 if files:
25 files[0] in d
26 except Exception as e:
27 pass
28 # uncomment this print if you're editing this Python code
29 # to debug failures.
30 # print e
31 )py",
32 "fuzzer", Py_file_input);
33 return 0;
34 }
35
36 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
37 {
38 // Don't allow fuzzer inputs larger than 100k, since we'll just bog
39 // down and not accomplish much.
40 if (Size > 100000) {
41 return 0;
42 }
43 PyObject *mtext =
44 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
45 PyObject *locals = PyDict_New();
46 PyDict_SetItemString(locals, "mdata", mtext);
47 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
48 if (!res) {
49 PyErr_Print();
50 }
51 Py_XDECREF(res);
52 Py_DECREF(locals);
53 Py_DECREF(mtext);
54 return 0; // Non-zero return values are reserved for future use.
55 }
56 }
@@ -0,0 +1,78 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include "pyutil.h"
7
8 #include <iostream>
9 #include <string>
10
extern "C" {

// Snippet compiled once at startup; executed per input with `data` bound to
// the raw fuzz bytes through the locals dict.
static PyCodeObject *code;

extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
{
	// Boot the embedded CPython interpreter (helper from pyutil.h).
	contrib::initpy(*argv[0]);
	code = (PyCodeObject *)Py_CompileString(R"py(
from parsers import (
    isasciistr,
    asciilower,
    asciiupper,
    encodedir,
    pathencode,
    lowerencode,
)

try:
    for fn in (
        isasciistr,
        asciilower,
        asciiupper,
        encodedir,
        pathencode,
        lowerencode,
    ):
        try:
            fn(data)
        except UnicodeDecodeError:
            pass # some functions emit this exception
        except AttributeError:
            # pathencode needs hashlib, which fails to import because the time
            # module fails to import. We should try and fix that some day, but
            # for now we at least get coverage on non-hashencoded codepaths.
            if fn != pathencode:
                raise
        # uncomment this for debugging exceptions
        # except Exception as e:
        #     raise Exception('%r: %r' % (fn, e))
except Exception as e:
    pass
# uncomment this print if you're editing this Python code
# to debug failures.
# print(e)
)py",
	                                        "fuzzer", Py_file_input);
	// Compile failures are reported but not fatal; fuzzing would then no-op.
	if (!code) {
		std::cerr << "failed to compile Python code!" << std::endl;
	}
	return 0;
}

int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
	// Wrap the raw input in a Python bytes object and expose it as `data`.
	PyObject *mtext =
	    PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
	PyObject *locals = PyDict_New();
	PyDict_SetItemString(locals, "data", mtext);
	// Run the pre-compiled snippet; Python-level errors are printed only.
	PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
	if (!res) {
		PyErr_Print();
	}
	Py_XDECREF(res);
	Py_DECREF(locals);
	Py_DECREF(mtext);
	return 0; // Non-zero return values are reserved for future use.
}
}
@@ -0,0 +1,57 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include "pyutil.h"
7
8 #include <fuzzer/FuzzedDataProvider.h>
9 #include <iostream>
10 #include <string>
11
extern "C" {

// Snippet compiled once; expects `data` (bytes) and `paranoid` (bool) locals.
static PyCodeObject *code;

extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
{
	contrib::initpy(*argv[0]);
	code = (PyCodeObject *)Py_CompileString(R"py(
from parsers import jsonescapeu8fast

try:
    jsonescapeu8fast(data, paranoid)
except Exception as e:
    pass
# uncomment this print if you're editing this Python code
# to debug failures.
# print(e)
)py",
	                                        "fuzzer", Py_file_input);
	if (!code) {
		std::cerr << "failed to compile Python code!" << std::endl;
	}
	return 0;
}

int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
	// The first input byte selects paranoid mode; the rest becomes `data`.
	FuzzedDataProvider provider(Data, Size);
	bool paranoid = provider.ConsumeBool();
	std::string remainder = provider.ConsumeRemainingBytesAsString();

	PyObject *mtext = PyBytes_FromStringAndSize(
	    (const char *)remainder.c_str(), remainder.size());
	PyObject *locals = PyDict_New();
	PyDict_SetItemString(locals, "data", mtext);
	// Py_True/Py_False are singletons; PyDict_SetItemString takes its own ref.
	PyDict_SetItemString(locals, "paranoid", paranoid ? Py_True : Py_False);
	PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
	if (!res) {
		PyErr_Print();
	}
	Py_XDECREF(res);
	Py_DECREF(locals);
	Py_DECREF(mtext);
	return 0; // Non-zero return values are reserved for future use.
}
}
This diff has been collapsed as it changes many lines, (4094 lines changed) Show them Hide them
@@ -0,0 +1,4094 b''
1 # no-check-code because 3rd party
2 import ast
3 import asyncio
4 from concurrent.futures import Executor, ProcessPoolExecutor
5 from contextlib import contextmanager
6 from datetime import datetime
7 from enum import Enum
8 from functools import lru_cache, partial, wraps
9 import io
10 import itertools
11 import logging
12 from multiprocessing import Manager, freeze_support
13 import os
14 from pathlib import Path
15 import pickle
16 import re
17 import signal
18 import sys
19 import tempfile
20 import tokenize
21 import traceback
22 from typing import (
23 Any,
24 Callable,
25 Collection,
26 Dict,
27 Generator,
28 Generic,
29 Iterable,
30 Iterator,
31 List,
32 Optional,
33 Pattern,
34 Sequence,
35 Set,
36 Tuple,
37 TypeVar,
38 Union,
39 cast,
40 )
41
42 from appdirs import user_cache_dir
43 from attr import dataclass, evolve, Factory
44 import click
45 import toml
46 from typed_ast import ast3, ast27
47
48 # lib2to3 fork
49 from blib2to3.pytree import Node, Leaf, type_repr
50 from blib2to3 import pygram, pytree
51 from blib2to3.pgen2 import driver, token
52 from blib2to3.pgen2.grammar import Grammar
53 from blib2to3.pgen2.parse import ParseError
54
__version__ = '19.3b1.dev95+gdc1add6.d20191005'

DEFAULT_LINE_LENGTH = 88
DEFAULT_EXCLUDES = (
    r"/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist)/"
)
DEFAULT_INCLUDES = r"\.pyi?$"
# Version-salted cache directory so caches from different releases don't mix.
CACHE_DIR = Path(user_cache_dir("black", version=__version__))


# types
FileContent = str
Encoding = str
NewLine = str
Depth = int
NodeType = int
LeafID = int
Priority = int
Index = int
LN = Union[Leaf, Node]
SplitFunc = Callable[["Line", Collection["Feature"]], Iterator["Line"]]
Timestamp = float
FileSize = int
CacheInfo = Tuple[Timestamp, FileSize]
Cache = Dict[Path, CacheInfo]
# Pre-bound click.secho helpers: `out` for status messages, `err` for errors.
out = partial(click.secho, bold=True, err=True)
err = partial(click.secho, fg="red", err=True)

# Initialize blib2to3's grammar tables at import time, using CACHE_DIR for
# its pickled grammar cache.
pygram.initialize(CACHE_DIR)
syms = pygram.python_symbols
85
86
# Used as control flow: format_file_contents raises this instead of returning
# an "unchanged" flag.
class NothingChanged(UserWarning):
    """Raised when reformatted code is the same as source."""
89
90
# NOTE(review): internal control-flow exception — presumably raised/caught by
# the line-splitting functions later in this module; not used in this chunk.
class CannotSplit(Exception):
    """A readable split that fits the allotted line length is impossible."""
93
94
# Raised by lib2to3_parse below when no grammar can parse the source.
class InvalidInput(ValueError):
    """Raised when input source code fails all parse attempts."""
97
98
class WriteBack(Enum):
    """What to do with a file once it has been reformatted."""

    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3

    @classmethod
    def from_configuration(cls, *, check: bool, diff: bool) -> "WriteBack":
        """Map the --check/--diff CLI flags onto a write-back policy.

        --diff wins over --check; neither flag means "write the file".
        """
        if diff:
            return cls.DIFF
        if check:
            return cls.CHECK
        return cls.YES
111
112
class Changed(Enum):
    """Outcome of processing a single file."""

    NO = 0  # file was already well formatted
    CACHED = 1  # skipped: the format cache says this file/mode is clean
    YES = 2  # file was (or would be) reformatted
117
118
class TargetVersion(Enum):
    """Python versions that Black's output may be required to support."""

    PY27 = 2
    PY33 = 3
    PY34 = 4
    PY35 = 5
    PY36 = 6
    PY37 = 7
    PY38 = 8

    def is_python2(self) -> bool:
        """True only for the lone Python 2 target."""
        return self == TargetVersion.PY27
130
131
# Versions implied by the deprecated --py36 flag (see main() below).
PY36_VERSIONS = {TargetVersion.PY36, TargetVersion.PY37, TargetVersion.PY38}
133
134
class Feature(Enum):
    """Language features whose availability depends on the target version."""

    # All string literals are unicode
    UNICODE_LITERALS = 1
    F_STRINGS = 2
    NUMERIC_UNDERSCORES = 3
    TRAILING_COMMA_IN_CALL = 4
    TRAILING_COMMA_IN_DEF = 5
    # The following two feature-flags are mutually exclusive, and exactly one should be
    # set for every version of python.
    ASYNC_IDENTIFIERS = 6
    ASYNC_KEYWORDS = 7
    ASSIGNMENT_EXPRESSIONS = 8
    POS_ONLY_ARGUMENTS = 9
148
149
# Feature set available in each target version; consulted by supports_feature().
VERSION_TO_FEATURES: Dict[TargetVersion, Set[Feature]] = {
    TargetVersion.PY27: {Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY33: {Feature.UNICODE_LITERALS, Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY34: {Feature.UNICODE_LITERALS, Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY35: {
        Feature.UNICODE_LITERALS,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.ASYNC_IDENTIFIERS,
    },
    TargetVersion.PY36: {
        Feature.UNICODE_LITERALS,
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_IDENTIFIERS,
    },
    TargetVersion.PY37: {
        Feature.UNICODE_LITERALS,
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
    },
    TargetVersion.PY38: {
        Feature.UNICODE_LITERALS,
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.POS_ONLY_ARGUMENTS,
    },
}
186
187
@dataclass
class FileMode:
    """Formatting options that affect output and cache identity."""

    target_versions: Set[TargetVersion] = Factory(set)
    line_length: int = DEFAULT_LINE_LENGTH
    string_normalization: bool = True
    is_pyi: bool = False

    def get_cache_key(self) -> str:
        """Serialize this mode into the string used to name its cache file."""
        if self.target_versions:
            ordered = sorted(self.target_versions, key=lambda v: v.value)
            version_str = ",".join(str(version.value) for version in ordered)
        else:
            version_str = "-"
        return ".".join(
            [
                version_str,
                str(self.line_length),
                str(int(self.string_normalization)),
                str(int(self.is_pyi)),
            ]
        )
210
211
def supports_feature(target_versions: Set[TargetVersion], feature: Feature) -> bool:
    """Whether `feature` is available in every requested target version.

    Vacuously true when `target_versions` is empty.
    """
    for version in target_versions:
        if feature not in VERSION_TO_FEATURES[version]:
            return False
    return True
214
215
def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Union[str, int, bool, None]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    # This click callback only makes sense for the path-valued --config option.
    assert not isinstance(value, (int, bool)), "Invalid parameter type passed"
    if not value:
        # No explicit --config: look for pyproject.toml at the project root.
        root = find_project_root(ctx.params.get("src", ()))
        path = root / "pyproject.toml"
        if path.is_file():
            value = str(path)
        else:
            return None

    try:
        pyproject_toml = toml.load(value)
        config = pyproject_toml.get("tool", {}).get("black", {})
    except (toml.TomlDecodeError, OSError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        )

    if not config:
        return None

    if ctx.default_map is None:
        ctx.default_map = {}
    # Config keys use CLI spelling ("line-length"); convert to the parameter
    # names click expects ("line_length").
    ctx.default_map.update(  # type: ignore # bad types in .pyi
        {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
    )
    return value
250
251
@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
@click.option(
    "-l",
    "--line-length",
    type=int,
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    show_default=True,
)
@click.option(
    "-t",
    "--target-version",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=lambda c, p, v: [TargetVersion[val.upper()] for val in v],
    multiple=True,
    help=(
        "Python versions that should be supported by Black's output. [default: "
        "per-file auto-detection]"
    ),
)
@click.option(
    "--py36",
    is_flag=True,
    help=(
        "Allow using Python 3.6-only syntax on all input files. This will put "
        "trailing commas in function signatures and calls also after *args and "
        "**kwargs. Deprecated; use --target-version instead. "
        "[default: per-file auto-detection]"
    ),
)
@click.option(
    "--pyi",
    is_flag=True,
    help=(
        "Format all input files like typing stubs regardless of file extension "
        "(useful when piping source on standard input)."
    ),
)
@click.option(
    "-S",
    "--skip-string-normalization",
    is_flag=True,
    help="Don't normalize string quotes or prefixes.",
)
@click.option(
    "--check",
    is_flag=True,
    help=(
        "Don't write the files back, just return the status. Return code 0 "
        "means nothing would change. Return code 1 means some files would be "
        "reformatted. Return code 123 means there was an internal error."
    ),
)
@click.option(
    "--diff",
    is_flag=True,
    help="Don't write the files back, just output a diff for each file on stdout.",
)
@click.option(
    "--fast/--safe",
    is_flag=True,
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
)
@click.option(
    "--include",
    type=str,
    default=DEFAULT_INCLUDES,
    help=(
        "A regular expression that matches files and directories that should be "
        "included on recursive searches. An empty value means all files are "
        "included regardless of the name. Use forward slashes for directories on "
        "all platforms (Windows, too). Exclusions are calculated first, inclusions "
        "later."
    ),
    show_default=True,
)
@click.option(
    "--exclude",
    type=str,
    default=DEFAULT_EXCLUDES,
    help=(
        "A regular expression that matches files and directories that should be "
        "excluded on recursive searches. An empty value means no paths are excluded. "
        "Use forward slashes for directories on all platforms (Windows, too). "
        "Exclusions are calculated first, inclusions later."
    ),
    show_default=True,
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help=(
        "Don't emit non-error messages to stderr. Errors are still emitted; "
        "silence those with 2>/dev/null."
    ),
)
@click.option(
    "-v",
    "--verbose",
    is_flag=True,
    help=(
        "Also emit messages to stderr about files that were not changed or were "
        "ignored due to --exclude=."
    ),
)
@click.version_option(version=__version__)
@click.argument(
    "src",
    nargs=-1,
    type=click.Path(
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    ),
    is_eager=True,
)
@click.option(
    "--config",
    type=click.Path(
        exists=False, file_okay=True, dir_okay=False, readable=True, allow_dash=False
    ),
    is_eager=True,
    callback=read_pyproject_toml,
    help="Read configuration from PATH.",
)
@click.pass_context
def main(
    ctx: click.Context,
    code: Optional[str],
    line_length: int,
    target_version: List[TargetVersion],
    check: bool,
    diff: bool,
    fast: bool,
    pyi: bool,
    py36: bool,
    skip_string_normalization: bool,
    quiet: bool,
    verbose: bool,
    include: str,
    exclude: str,
    src: Tuple[str],
    config: Optional[str],
) -> None:
    """The uncompromising code formatter.

    CLI entry point: resolves options into a FileMode, collects the files to
    process, dispatches to single-file or parallel reformatting, and exits
    with the Report's return code.
    """
    # Translate --check/--diff into a write-back policy.
    write_back = WriteBack.from_configuration(check=check, diff=diff)
    if target_version:
        if py36:
            # --py36 and --target-version are mutually exclusive.
            err(f"Cannot use both --target-version and --py36")
            ctx.exit(2)
        else:
            versions = set(target_version)
    elif py36:
        err(
            "--py36 is deprecated and will be removed in a future version. "
            "Use --target-version py36 instead."
        )
        versions = PY36_VERSIONS
    else:
        # We'll autodetect later.
        versions = set()
    mode = FileMode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        string_normalization=not skip_string_normalization,
    )
    if config and verbose:
        out(f"Using configuration from {config}.", bold=False, fg="blue")
    if code is not None:
        # -c/--code: format the given string and exit immediately.
        print(format_str(code, mode=mode))
        ctx.exit(0)
    try:
        include_regex = re_compile_maybe_verbose(include)
    except re.error:
        err(f"Invalid regular expression for include given: {include!r}")
        ctx.exit(2)
    try:
        exclude_regex = re_compile_maybe_verbose(exclude)
    except re.error:
        err(f"Invalid regular expression for exclude given: {exclude!r}")
        ctx.exit(2)
    report = Report(check=check, quiet=quiet, verbose=verbose)
    root = find_project_root(src)
    sources: Set[Path] = set()
    path_empty(src, quiet, verbose, ctx)
    for s in src:
        p = Path(s)
        if p.is_dir():
            sources.update(
                gen_python_files_in_dir(p, root, include_regex, exclude_regex, report)
            )
        elif p.is_file() or s == "-":
            # if a file was explicitly given, we don't care about its extension
            sources.add(p)
        else:
            err(f"invalid path: {s}")
    if len(sources) == 0:
        if verbose or not quiet:
            out("No Python files are present to be formatted. Nothing to do 😴")
        ctx.exit(0)

    # One file (or stdin) is handled in-process; more files fan out to workers.
    if len(sources) == 1:
        reformat_one(
            src=sources.pop(),
            fast=fast,
            write_back=write_back,
            mode=mode,
            report=report,
        )
    else:
        reformat_many(
            sources=sources, fast=fast, write_back=write_back, mode=mode, report=report
        )

    if verbose or not quiet:
        out("Oh no! 💥 💔 💥" if report.return_code else "All done! ✨ 🍰 ✨")
        click.secho(str(report), err=True)
    ctx.exit(report.return_code)
471
472
def path_empty(src: Tuple[str], quiet: bool, verbose: bool, ctx: click.Context) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    # Guard clause: anything to format means there is nothing to report here.
    if src:
        return
    if verbose or not quiet:
        out("No Path provided. Nothing to do 😴")
    ctx.exit(0)
481
482
def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: FileMode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO
        if not src.is_file() and str(src) == "-":
            # "-" means read from stdin, write result to stdout.
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            # DIFF runs never consult or populate the format cache.
            if write_back != WriteBack.DIFF:
                cache = read_cache(mode)
                res_src = src.resolve()
                # A matching (mtime, size) entry means the file is known clean.
                if res_src in cache and cache[res_src] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            # Record the file as clean after a write, or after a passing check.
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        # Report failures instead of aborting the whole run.
        report.failed(src, str(exc))
514
515
def reformat_many(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: FileMode,
    report: "Report",
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor.

    `fast`, `write_back`, and `mode` are forwarded to the per-file formatting
    routine; per-file outcomes are recorded on `report`.
    """
    loop = asyncio.get_event_loop()
    # os.cpu_count() is documented to return None when undeterminable; keep
    # None for ProcessPoolExecutor (it then picks its own default), but don't
    # let it reach min() below.
    worker_count = os.cpu_count()
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        worker_count = min(worker_count or 61, 61)
    executor = ProcessPoolExecutor(max_workers=worker_count)
    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
            )
        )
    finally:
        # Always tear down the loop and worker pool, even on errors.
        shutdown(loop)
        executor.shutdown()
545
546
async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: FileMode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: Executor,
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    cache: Cache = {}
    if write_back != WriteBack.DIFF:
        # Drop files the cache already knows are clean; report them as CACHED.
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back == WriteBack.DIFF:
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    # One future per file, mapped back to its path for reporting.
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending: Iterable[asyncio.Future] = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        # Let cancelled tasks finish unwinding before we return.
        await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    if sources_to_cache:
        write_cache(cache, sources_to_cache, mode)
616
617
def format_file_in_place(
    src: Path,
    fast: bool,
    mode: FileMode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    # .pyi stubs get stub-specific formatting regardless of the caller's mode.
    if src.suffix == ".pyi":
        mode = evolve(mode, is_pyi=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False

    if write_back == write_back.YES:
        # Preserve the original encoding and newline style when rewriting.
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back == write_back.DIFF:
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        # `lock` serializes diff output across worker processes; nullcontext
        # (presumably defined elsewhere in this module — confirm) is the no-op
        # stand-in when no lock was supplied.
        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f.write(diff_contents)
            # Detach rather than close so sys.stdout.buffer stays usable.
            f.detach()

    return True
662
663
def format_stdin_to_stdout(
    fast: bool, *, write_back: WriteBack = WriteBack.NO, mode: FileMode
) -> bool:
    """Format file on stdin. Return True if changed.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()
    src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True

    except NothingChanged:
        return False

    finally:
        # Output lives in `finally` so the NothingChanged path still echoes
        # the (unchanged) source when write-back was requested.
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            f.write(dst)
        elif write_back == WriteBack.DIFF:
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            f.write(diff(src, dst, src_name, dst_name))
        # Detach instead of close so sys.stdout.buffer stays open.
        f.detach()
695
696
def format_file_contents(
    src_contents: str, *, fast: bool, mode: FileMode
) -> FileContent:
    """Reformat contents a file and return new contents.

    Raises NothingChanged when the source is empty or already formatted.
    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    dst_contents = format_str(src_contents, mode=mode)
    if dst_contents == src_contents:
        raise NothingChanged

    if fast:
        return dst_contents

    # Sanity checks: the result must parse to the same AST and be idempotent.
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)
    return dst_contents
717
718
def format_str(src_contents: str, *, mode: FileMode) -> FileContent:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.
    """
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
    else:
        # No explicit targets: infer the applicable versions from the syntax.
        versions = detect_target_versions(src_node)
    normalize_fmt_off(src_node)
    lines = LineGenerator(
        remove_u_prefix="unicode_literals" in future_imports
        or supports_feature(versions, Feature.UNICODE_LITERALS),
        is_pyi=mode.is_pyi,
        normalize_strings=mode.string_normalization,
    )
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line()
    after = 0
    # Magic trailing commas can only be used when every target allows them.
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    for current_line in lines.visit(src_node):
        # Emit the blank lines owed from the previous line, then ask the
        # tracker how many blanks belong before/after the current one.
        for _ in range(after):
            dst_contents.append(str(empty_line))
        before, after = elt.maybe_empty_lines(current_line)
        for _ in range(before):
            dst_contents.append(str(empty_line))
        for line in split_line(
            current_line, line_length=mode.line_length, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)
758
759
def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    buf = io.BytesIO(src)
    # Honor a PEP 263 coding cookie / BOM when picking the encoding.
    encoding, first_lines = tokenize.detect_encoding(buf.readline)
    if not first_lines:
        return "", encoding, "\n"

    # The first physical line decides which newline style we report.
    newline = "\r\n" if first_lines[0].endswith(b"\r\n") else "\n"
    buf.seek(0)
    with io.TextIOWrapper(buf, encoding) as wrapper:
        return wrapper.read(), encoding, newline
775
776
def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]:
    """Return the lib2to3 grammars to try for `target_versions`.

    Grammars are ordered most-modern first; lib2to3_parse tries each in turn.
    """
    if not target_versions:
        # No target_version specified, so try all grammars.
        return [
            # Python 3.7+
            pygram.python_grammar_no_print_statement_no_exec_statement_async_keywords,
            # Python 3.0-3.6
            pygram.python_grammar_no_print_statement_no_exec_statement,
            # Python 2.7 with future print_function import
            pygram.python_grammar_no_print_statement,
            # Python 2.7
            pygram.python_grammar,
        ]
    elif all(version.is_python2() for version in target_versions):
        # Python 2-only code, so try Python 2 grammars.
        return [
            # Python 2.7 with future print_function import
            pygram.python_grammar_no_print_statement,
            # Python 2.7
            pygram.python_grammar,
        ]
    else:
        # Python 3-compatible code, so only try Python 3 grammar.
        grammars = []
        # If we have to parse both, try to parse async as a keyword first
        if not supports_feature(target_versions, Feature.ASYNC_IDENTIFIERS):
            # Python 3.7+
            grammars.append(
                pygram.python_grammar_no_print_statement_no_exec_statement_async_keywords  # noqa: B950
            )
        if not supports_feature(target_versions, Feature.ASYNC_KEYWORDS):
            # Python 3.0-3.6
            grammars.append(pygram.python_grammar_no_print_statement_no_exec_statement)
        # At least one of the above branches must have been taken, because every Python
        # version has exactly one of the two 'ASYNC_*' flags
        return grammars
813
814
def lib2to3_parse(src_txt: str, target_versions: Iterable[TargetVersion] = ()) -> Node:
    """Given a string with source, return the lib2to3 Node."""
    # The parser requires a trailing newline.
    if not src_txt.endswith("\n"):
        src_txt += "\n"

    for grammar in get_grammars(set(target_versions)):
        parser = driver.Driver(grammar, pytree.convert)
        try:
            result = parser.parse_string(src_txt, True)
        except ParseError as pe:
            # Remember a human-readable error; it is raised only if every
            # grammar fails (see the for-else below).
            lineno, column = pe.context[1]
            src_lines = src_txt.splitlines()
            try:
                faulty_line = src_lines[lineno - 1]
            except IndexError:
                faulty_line = "<line number missing in source>"
            exc = InvalidInput(f"Cannot parse: {lineno}:{column}: {faulty_line}")
        else:
            break
    else:
        raise exc from None

    if isinstance(result, Leaf):
        result = Node(syms.file_input, [result])
    return result
840
841
def lib2to3_unparse(node: Node) -> str:
    """Given a lib2to3 node, return its string representation."""
    return str(node)
846
847
848 T = TypeVar("T")
849
850
class Visitor(Generic[T]):
    """Basic lib2to3 visitor that yields things of type `T` on `visit()`."""

    def visit(self, node: LN) -> Iterator[T]:
        """Main method to visit `node` and its children.

        It tries to find a `visit_*()` method for the given `node.type`, like
        `visit_simple_stmt` for Node objects or `visit_INDENT` for Leaf objects.
        If no dedicated `visit_*()` method is found, chooses `visit_default()`
        instead.

        Then yields objects of type `T` from the selected visitor.
        """
        # Types below 256 are tokens (leaves); anything above is a symbol.
        name = token.tok_name[node.type] if node.type < 256 else type_repr(node.type)
        visit_method = getattr(self, f"visit_{name}", self.visit_default)
        yield from visit_method(node)

    def visit_default(self, node: LN) -> Iterator[T]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Node):
            for child in node.children:
                yield from self.visit(child)
875
876
@dataclass
class DebugVisitor(Visitor[T]):
    """Visitor that pretty-prints the tree it walks.  Debugging aid."""

    # Current nesting level; drives the two-space indentation of the output.
    tree_depth: int = 0

    def visit_default(self, node: LN) -> Iterator[T]:
        pad = " " * (2 * self.tree_depth)
        if isinstance(node, Node):
            node_type = type_repr(node.type)
            out(f"{pad}{node_type}", fg="yellow")
            self.tree_depth += 1
            for child in node.children:
                yield from self.visit(child)

            self.tree_depth -= 1
            out(f"{pad}/{node_type}", fg="yellow", bold=False)
            return

        # Leaf: print token name, then its prefix (if any), then its value.
        leaf_type = token.tok_name.get(node.type, str(node.type))
        out(f"{pad}{leaf_type}", fg="blue", nl=False)
        if node.prefix:
            # We don't have to handle prefixes for `Node` objects since
            # that delegates to the first child anyway.
            out(f" {node.prefix!r}", fg="green", bold=False, nl=False)
        out(f" {node.value!r}", fg="blue", bold=False)

    @classmethod
    def show(cls, code: Union[str, Leaf, Node]) -> None:
        """Pretty-print the lib2to3 AST of a given string of `code`.

        Convenience method for debugging.
        """
        visitor: DebugVisitor[None] = DebugVisitor()
        if isinstance(code, str):
            code = lib2to3_parse(code)
        list(visitor.visit(code))
911
912
# Token types that carry no printable value of their own.
WHITESPACE = {token.DEDENT, token.INDENT, token.NEWLINE}
# Compound statements: their keywords are emitted on separate lines.
STATEMENT = {
    syms.if_stmt,
    syms.while_stmt,
    syms.for_stmt,
    syms.try_stmt,
    syms.except_clause,
    syms.with_stmt,
    syms.funcdef,
    syms.classdef,
}
# Synthetic token type for comments that must stay on their own line;
# registered in tok_name so debugging output can display it.
STANDALONE_COMMENT = 153
token.tok_name[STANDALONE_COMMENT] = "STANDALONE_COMMENT"
LOGIC_OPERATORS = {"and", "or"}
COMPARATORS = {
    token.LESS,
    token.GREATER,
    token.EQEQUAL,
    token.NOTEQUAL,
    token.LESSEQUAL,
    token.GREATEREQUAL,
}
MATH_OPERATORS = {
    token.VBAR,
    token.CIRCUMFLEX,
    token.AMPER,
    token.LEFTSHIFT,
    token.RIGHTSHIFT,
    token.PLUS,
    token.MINUS,
    token.STAR,
    token.SLASH,
    token.DOUBLESLASH,
    token.PERCENT,
    token.AT,
    token.TILDE,
    token.DOUBLESTAR,
}
STARS = {token.STAR, token.DOUBLESTAR}
VARARGS_SPECIALS = STARS | {token.SLASH}
# Parent symbols in which a star/double-star/slash acts as varargs syntax.
VARARGS_PARENTS = {
    syms.arglist,
    syms.argument,  # double star in arglist
    syms.trailer,  # single argument to call
    syms.typedargslist,
    syms.varargslist,  # lambdas
}
# Parent symbols in which a star means iterable unpacking.
UNPACKING_PARENTS = {
    syms.atom,  # single element of a list or set literal
    syms.dictsetmaker,
    syms.listmaker,
    syms.testlist_gexp,
    syms.testlist_star_expr,
}
# Symbols that may appear inside a subscript and make it "complex"
# (see Line.is_complex_subscript).
TEST_DESCENDANTS = {
    syms.test,
    syms.lambdef,
    syms.or_test,
    syms.and_test,
    syms.not_test,
    syms.comparison,
    syms.star_expr,
    syms.expr,
    syms.xor_expr,
    syms.and_expr,
    syms.shift_expr,
    syms.arith_expr,
    syms.trailer,
    syms.term,
    syms.power,
}
ASSIGNMENTS = {
    "=",
    "+=",
    "-=",
    "*=",
    "@=",
    "/=",
    "%=",
    "&=",
    "|=",
    "^=",
    "<<=",
    ">>=",
    "**=",
    "//=",
}
# Split priorities: a higher number means Black prefers to split there first.
COMPREHENSION_PRIORITY = 20
COMMA_PRIORITY = 18
TERNARY_PRIORITY = 16
LOGIC_PRIORITY = 14
STRING_PRIORITY = 12
COMPARATOR_PRIORITY = 10
# Arithmetic/bitwise operators ranked by binding strength (looser binds split
# first).
MATH_PRIORITIES = {
    token.VBAR: 9,
    token.CIRCUMFLEX: 8,
    token.AMPER: 7,
    token.LEFTSHIFT: 6,
    token.RIGHTSHIFT: 6,
    token.PLUS: 5,
    token.MINUS: 5,
    token.STAR: 4,
    token.SLASH: 4,
    token.DOUBLESLASH: 4,
    token.PERCENT: 4,
    token.AT: 4,
    token.TILDE: 3,
    token.DOUBLESTAR: 2,
}
DOT_PRIORITY = 1
1023
1024
@dataclass
class BracketTracker:
    """Keeps track of brackets on a line."""

    # Current bracket nesting depth (only brackets opened on this line).
    depth: int = 0
    # Maps (depth, closing-bracket token type) to the opening bracket leaf.
    bracket_match: Dict[Tuple[Depth, NodeType], Leaf] = Factory(dict)
    # Maps id() of a leaf to the split priority of the delimiter at that leaf.
    delimiters: Dict[LeafID, Priority] = Factory(dict)
    # Most recently marked leaf; used for split-before delimiters.
    previous: Optional[Leaf] = None
    # Stack of depths at which `for ... in` artificially deepened tokens.
    _for_loop_depths: List[int] = Factory(list)
    # Stack of depths at which `lambda ...:` artificially deepened tokens.
    _lambda_argument_depths: List[int] = Factory(list)

    def mark(self, leaf: Leaf) -> None:
        """Mark `leaf` with bracket-related metadata. Keep track of delimiters.

        All leaves receive an int `bracket_depth` field that stores how deep
        within brackets a given leaf is. 0 means there are no enclosing brackets
        that started on this line.

        If a leaf is itself a closing bracket, it receives an `opening_bracket`
        field that it forms a pair with. This is a one-directional link to
        avoid reference cycles.

        If a leaf is a delimiter (a token on which Black can split the line if
        needed) and it's on depth 0, its `id()` is stored in the tracker's
        `delimiters` field.
        """
        if leaf.type == token.COMMENT:
            return

        # `for`/`lambda` depth adjustments must happen before pairing brackets.
        self.maybe_decrement_after_for_loop_variable(leaf)
        self.maybe_decrement_after_lambda_arguments(leaf)
        if leaf.type in CLOSING_BRACKETS:
            self.depth -= 1
            opening_bracket = self.bracket_match.pop((self.depth, leaf.type))
            leaf.opening_bracket = opening_bracket
        leaf.bracket_depth = self.depth
        if self.depth == 0:
            # Only depth-0 delimiters are split candidates.
            delim = is_split_before_delimiter(leaf, self.previous)
            if delim and self.previous is not None:
                self.delimiters[id(self.previous)] = delim
            else:
                delim = is_split_after_delimiter(leaf, self.previous)
                if delim:
                    self.delimiters[id(leaf)] = delim
        if leaf.type in OPENING_BRACKETS:
            self.bracket_match[self.depth, BRACKET[leaf.type]] = leaf
            self.depth += 1
        self.previous = leaf
        self.maybe_increment_lambda_arguments(leaf)
        self.maybe_increment_for_loop_variable(leaf)

    def any_open_brackets(self) -> bool:
        """Return True if there is an yet unmatched open bracket on the line."""
        return bool(self.bracket_match)

    def max_delimiter_priority(self, exclude: Iterable[LeafID] = ()) -> Priority:
        """Return the highest priority of a delimiter found on the line.

        Values are consistent with what `is_split_*_delimiter()` return.
        Raises ValueError on no delimiters.
        """
        return max(v for k, v in self.delimiters.items() if k not in exclude)

    def delimiter_count_with_priority(self, priority: Priority = 0) -> int:
        """Return the number of delimiters with the given `priority`.

        If no `priority` is passed, defaults to max priority on the line.
        """
        if not self.delimiters:
            return 0

        priority = priority or self.max_delimiter_priority()
        return sum(1 for p in self.delimiters.values() if p == priority)

    def maybe_increment_for_loop_variable(self, leaf: Leaf) -> bool:
        """In a for loop, or comprehension, the variables are often unpacks.

        To avoid splitting on the comma in this situation, increase the depth of
        tokens between `for` and `in`.
        """
        if leaf.type == token.NAME and leaf.value == "for":
            self.depth += 1
            self._for_loop_depths.append(self.depth)
            return True

        return False

    def maybe_decrement_after_for_loop_variable(self, leaf: Leaf) -> bool:
        """See `maybe_increment_for_loop_variable` above for explanation."""
        if (
            self._for_loop_depths
            and self._for_loop_depths[-1] == self.depth
            and leaf.type == token.NAME
            and leaf.value == "in"
        ):
            self.depth -= 1
            self._for_loop_depths.pop()
            return True

        return False

    def maybe_increment_lambda_arguments(self, leaf: Leaf) -> bool:
        """In a lambda expression, there might be more than one argument.

        To avoid splitting on the comma in this situation, increase the depth of
        tokens between `lambda` and `:`.
        """
        if leaf.type == token.NAME and leaf.value == "lambda":
            self.depth += 1
            self._lambda_argument_depths.append(self.depth)
            return True

        return False

    def maybe_decrement_after_lambda_arguments(self, leaf: Leaf) -> bool:
        """See `maybe_increment_lambda_arguments` above for explanation."""
        if (
            self._lambda_argument_depths
            and self._lambda_argument_depths[-1] == self.depth
            and leaf.type == token.COLON
        ):
            self.depth -= 1
            self._lambda_argument_depths.pop()
            return True

        return False

    def get_open_lsqb(self) -> Optional[Leaf]:
        """Return the most recent opening square bracket (if any)."""
        return self.bracket_match.get((self.depth - 1, token.RSQB))
1155
1156
@dataclass
class Line:
    """Holds leaves and comments. Can be printed with `str(line)`."""

    # Indentation level of the line (in units of 4 spaces when rendered).
    depth: int = 0
    leaves: List[Leaf] = Factory(list)
    comments: Dict[LeafID, List[Leaf]] = Factory(dict)  # keys ordered like `leaves`
    bracket_tracker: BracketTracker = Factory(BracketTracker)
    # True when this line lives inside brackets of an enclosing line.
    inside_brackets: bool = False
    # True when the line must be split with one element per line.
    should_explode: bool = False

    def append(self, leaf: Leaf, preformatted: bool = False) -> None:
        """Add a new `leaf` to the end of the line.

        Unless `preformatted` is True, the `leaf` will receive a new consistent
        whitespace prefix and metadata applied by :class:`BracketTracker`.
        Trailing commas are maybe removed, unpacked for loop variables are
        demoted from being delimiters.

        Inline comments are put aside.
        """
        has_value = leaf.type in BRACKETS or bool(leaf.value.strip())
        if not has_value:
            return

        if token.COLON == leaf.type and self.is_class_paren_empty:
            # Drop the useless `()` of `class C():` before appending the colon.
            del self.leaves[-2:]
        if self.leaves and not preformatted:
            # Note: at this point leaf.prefix should be empty except for
            # imports, for which we only preserve newlines.
            leaf.prefix += whitespace(
                leaf, complex_subscript=self.is_complex_subscript(leaf)
            )
        if self.inside_brackets or not preformatted:
            self.bracket_tracker.mark(leaf)
            self.maybe_remove_trailing_comma(leaf)
        if not self.append_comment(leaf):
            self.leaves.append(leaf)

    def append_safe(self, leaf: Leaf, preformatted: bool = False) -> None:
        """Like :func:`append()` but disallow invalid standalone comment structure.

        Raises ValueError when any `leaf` is appended after a standalone comment
        or when a standalone comment is not the first leaf on the line.
        """
        if self.bracket_tracker.depth == 0:
            if self.is_comment:
                raise ValueError("cannot append to standalone comments")

            if self.leaves and leaf.type == STANDALONE_COMMENT:
                raise ValueError(
                    "cannot append standalone comments to a populated line"
                )

        self.append(leaf, preformatted=preformatted)

    @property
    def is_comment(self) -> bool:
        """Is this line a standalone comment?"""
        return len(self.leaves) == 1 and self.leaves[0].type == STANDALONE_COMMENT

    @property
    def is_decorator(self) -> bool:
        """Is this line a decorator?"""
        return bool(self) and self.leaves[0].type == token.AT

    @property
    def is_import(self) -> bool:
        """Is this an import line?"""
        return bool(self) and is_import(self.leaves[0])

    @property
    def is_class(self) -> bool:
        """Is this line a class definition?"""
        return (
            bool(self)
            and self.leaves[0].type == token.NAME
            and self.leaves[0].value == "class"
        )

    @property
    def is_stub_class(self) -> bool:
        """Is this line a class definition with a body consisting only of "..."?"""
        return self.is_class and self.leaves[-3:] == [
            Leaf(token.DOT, ".") for _ in range(3)
        ]

    @property
    def is_collection_with_optional_trailing_comma(self) -> bool:
        """Is this line a collection literal with a trailing comma that's optional?

        Note that the trailing comma in a 1-tuple is not optional.
        """
        if not self.leaves or len(self.leaves) < 4:
            return False
        # Look for and address a trailing colon.
        if self.leaves[-1].type == token.COLON:
            closer = self.leaves[-2]
            close_index = -2
        else:
            closer = self.leaves[-1]
            close_index = -1
        if closer.type not in CLOSING_BRACKETS or self.inside_brackets:
            return False
        if closer.type == token.RPAR:
            # Tuples require an extra check, because if there's only
            # one element in the tuple removing the comma unmakes the
            # tuple.
            #
            # We also check for parens before looking for the trailing
            # comma because in some cases (eg assigning a dict
            # literal) the literal gets wrapped in temporary parens
            # during parsing. This case is covered by the
            # collections.py test data.
            opener = closer.opening_bracket
            for _open_index, leaf in enumerate(self.leaves):
                if leaf is opener:
                    break
            else:
                # Couldn't find the matching opening paren, play it safe.
                return False
            commas = 0
            comma_depth = self.leaves[close_index - 1].bracket_depth
            for leaf in self.leaves[_open_index + 1 : close_index]:
                if leaf.bracket_depth == comma_depth and leaf.type == token.COMMA:
                    commas += 1
            if commas > 1:
                # We haven't looked yet for the trailing comma because
                # we might also have caught noop parens.
                return self.leaves[close_index - 1].type == token.COMMA
            elif commas == 1:
                return False  # it's either a one-tuple or didn't have a trailing comma
            if self.leaves[close_index - 1].type in CLOSING_BRACKETS:
                close_index -= 1
                closer = self.leaves[close_index]
                if closer.type == token.RPAR:
                    # TODO: this is a gut feeling. Will we ever see this?
                    return False
        if self.leaves[close_index - 1].type != token.COMMA:
            return False
        return True

    @property
    def is_def(self) -> bool:
        """Is this a function definition? (Also returns True for async defs.)"""
        try:
            first_leaf = self.leaves[0]
        except IndexError:
            return False

        try:
            second_leaf: Optional[Leaf] = self.leaves[1]
        except IndexError:
            second_leaf = None
        return (first_leaf.type == token.NAME and first_leaf.value == "def") or (
            first_leaf.type == token.ASYNC
            and second_leaf is not None
            and second_leaf.type == token.NAME
            and second_leaf.value == "def"
        )

    @property
    def is_class_paren_empty(self) -> bool:
        """Is this a class with no base classes but using parentheses?

        Those are unnecessary and should be removed.
        """
        return (
            bool(self)
            and len(self.leaves) == 4
            and self.is_class
            and self.leaves[2].type == token.LPAR
            and self.leaves[2].value == "("
            and self.leaves[3].type == token.RPAR
            and self.leaves[3].value == ")"
        )

    @property
    def is_triple_quoted_string(self) -> bool:
        """Is the line a triple quoted string?"""
        return (
            bool(self)
            and self.leaves[0].type == token.STRING
            and self.leaves[0].value.startswith(('"""', "'''"))
        )

    def contains_standalone_comments(self, depth_limit: int = sys.maxsize) -> bool:
        """If so, needs to be split before emitting."""
        for leaf in self.leaves:
            if leaf.type == STANDALONE_COMMENT:
                if leaf.bracket_depth <= depth_limit:
                    return True
        return False

    def contains_uncollapsable_type_comments(self) -> bool:
        """Return True if a `# type:` comment prevents joining this line."""
        ignored_ids = set()
        try:
            last_leaf = self.leaves[-1]
            ignored_ids.add(id(last_leaf))
            if last_leaf.type == token.COMMA or (
                last_leaf.type == token.RPAR and not last_leaf.value
            ):
                # When trailing commas or optional parens are inserted by Black for
                # consistency, comments after the previous last element are not moved
                # (they don't have to, rendering will still be correct). So we ignore
                # trailing commas and invisible.
                last_leaf = self.leaves[-2]
                ignored_ids.add(id(last_leaf))
        except IndexError:
            return False

        # A type comment is uncollapsable if it is attached to a leaf
        # that isn't at the end of the line (since that could cause it
        # to get associated to a different argument) or if there are
        # comments before it (since that could cause it to get hidden
        # behind a comment.
        comment_seen = False
        for leaf_id, comments in self.comments.items():
            for comment in comments:
                if is_type_comment(comment):
                    if leaf_id not in ignored_ids or comment_seen:
                        return True

                comment_seen = True

        return False

    def contains_unsplittable_type_ignore(self) -> bool:
        """Return True if a trailing `# type: ignore` forbids splitting."""
        if not self.leaves:
            return False

        # If a 'type: ignore' is attached to the end of a line, we
        # can't split the line, because we can't know which of the
        # subexpressions the ignore was meant to apply to.
        #
        # We only want this to apply to actual physical lines from the
        # original source, though: we don't want the presence of a
        # 'type: ignore' at the end of a multiline expression to
        # justify pushing it all onto one line. Thus we
        # (unfortunately) need to check the actual source lines and
        # only report an unsplittable 'type: ignore' if this line was
        # one line in the original code.
        if self.leaves[0].lineno == self.leaves[-1].lineno:
            for comment in self.comments.get(id(self.leaves[-1]), []):
                if is_type_comment(comment, " ignore"):
                    return True

        return False

    def contains_multiline_strings(self) -> bool:
        """Return True if any leaf on this line is a multiline string."""
        for leaf in self.leaves:
            if is_multiline_string(leaf):
                return True

        return False

    def maybe_remove_trailing_comma(self, closing: Leaf) -> bool:
        """Remove trailing comma if there is one and it's safe."""
        if not (self.leaves and self.leaves[-1].type == token.COMMA):
            return False
        # We remove trailing commas only in the case of importing a
        # single name from a module.
        if not (
            self.leaves
            and self.is_import
            and len(self.leaves) > 4
            and self.leaves[-1].type == token.COMMA
            and closing.type in CLOSING_BRACKETS
            and self.leaves[-4].type == token.NAME
            and (
                # regular `from foo import bar,`
                self.leaves[-4].value == "import"
                # `from foo import (bar as baz,)
                or (
                    len(self.leaves) > 6
                    and self.leaves[-6].value == "import"
                    and self.leaves[-3].value == "as"
                )
                # `from foo import bar as baz,`
                or (
                    len(self.leaves) > 5
                    and self.leaves[-5].value == "import"
                    and self.leaves[-3].value == "as"
                )
            )
            and closing.type == token.RPAR
        ):
            return False

        self.remove_trailing_comma()
        return True

    def append_comment(self, comment: Leaf) -> bool:
        """Add an inline or standalone comment to the line.

        Returns True when the comment was consumed (stored in `self.comments`);
        False when the caller should append it as a regular leaf instead.
        """
        if (
            comment.type == STANDALONE_COMMENT
            and self.bracket_tracker.any_open_brackets()
        ):
            comment.prefix = ""
            return False

        if comment.type != token.COMMENT:
            return False

        if not self.leaves:
            comment.type = STANDALONE_COMMENT
            comment.prefix = ""
            return False

        last_leaf = self.leaves[-1]
        if (
            last_leaf.type == token.RPAR
            and not last_leaf.value
            and last_leaf.parent
            and len(list(last_leaf.parent.leaves())) <= 3
            and not is_type_comment(comment)
        ):
            # Comments on an optional parens wrapping a single leaf should belong to
            # the wrapped node except if it's a type comment. Pinning the comment like
            # this avoids unstable formatting caused by comment migration.
            if len(self.leaves) < 2:
                comment.type = STANDALONE_COMMENT
                comment.prefix = ""
                return False
            last_leaf = self.leaves[-2]
        self.comments.setdefault(id(last_leaf), []).append(comment)
        return True

    def comments_after(self, leaf: Leaf) -> List[Leaf]:
        """Generate comments that should appear directly after `leaf`."""
        return self.comments.get(id(leaf), [])

    def remove_trailing_comma(self) -> None:
        """Remove the trailing comma and moves the comments attached to it."""
        trailing_comma = self.leaves.pop()
        trailing_comma_comments = self.comments.pop(id(trailing_comma), [])
        self.comments.setdefault(id(self.leaves[-1]), []).extend(
            trailing_comma_comments
        )

    def is_complex_subscript(self, leaf: Leaf) -> bool:
        """Return True iff `leaf` is part of a slice with non-trivial exprs."""
        open_lsqb = self.bracket_tracker.get_open_lsqb()
        if open_lsqb is None:
            return False

        subscript_start = open_lsqb.next_sibling

        if isinstance(subscript_start, Node):
            if subscript_start.type == syms.listmaker:
                return False

            if subscript_start.type == syms.subscriptlist:
                subscript_start = child_towards(subscript_start, leaf)
        return subscript_start is not None and any(
            n.type in TEST_DESCENDANTS for n in subscript_start.pre_order()
        )

    def __str__(self) -> str:
        """Render the line."""
        if not self:
            return "\n"

        indent = "    " * self.depth
        leaves = iter(self.leaves)
        first = next(leaves)
        res = f"{first.prefix}{indent}{first.value}"
        for leaf in leaves:
            res += str(leaf)
        for comment in itertools.chain.from_iterable(self.comments.values()):
            res += str(comment)
        return res + "\n"

    def __bool__(self) -> bool:
        """Return True if the line has leaves or comments."""
        return bool(self.leaves or self.comments)
1533
1534
@dataclass
class EmptyLineTracker:
    """Provides a stateful method that returns the number of potential extra
    empty lines needed before and after the currently processed line.

    Note: this tracker works on lines that haven't been split yet. It assumes
    the prefix of the first leaf consists of optional newlines. Those newlines
    are consumed by `maybe_empty_lines()` and included in the computation.
    """

    # True when formatting a .pyi stub file (tighter vertical whitespace).
    is_pyi: bool = False
    previous_line: Optional[Line] = None
    # Empty lines already emitted after the previous line.
    previous_after: int = 0
    # Depths of `def`/`class` lines currently "open" above this point.
    previous_defs: List[int] = Factory(list)

    def maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
        """Return the number of extra empty lines before and after the `current_line`.

        This is for separating `def`, `async def` and `class` with extra empty
        lines (two on module-level).
        """
        before, after = self._maybe_empty_lines(current_line)
        before = (
            # Black should not insert empty lines at the beginning
            # of the file
            0
            if self.previous_line is None
            else before - self.previous_after
        )
        self.previous_after = after
        self.previous_line = current_line
        return before, after

    def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
        max_allowed = 1
        if current_line.depth == 0:
            max_allowed = 1 if self.is_pyi else 2
        if current_line.leaves:
            # Consume the first leaf's extra newlines.
            first_leaf = current_line.leaves[0]
            before = first_leaf.prefix.count("\n")
            before = min(before, max_allowed)
            first_leaf.prefix = ""
        else:
            before = 0
        depth = current_line.depth
        while self.previous_defs and self.previous_defs[-1] >= depth:
            self.previous_defs.pop()
            if self.is_pyi:
                before = 0 if depth else 1
            else:
                before = 1 if depth else 2
        if current_line.is_decorator or current_line.is_def or current_line.is_class:
            return self._maybe_empty_lines_for_class_or_def(current_line, before)

        if (
            self.previous_line
            and self.previous_line.is_import
            and not current_line.is_import
            and depth == self.previous_line.depth
        ):
            # Separate a block of imports from the code that follows it.
            return (before or 1), 0

        if (
            self.previous_line
            and self.previous_line.is_class
            and current_line.is_triple_quoted_string
        ):
            # One empty line after a class docstring.
            return before, 1

        return before, 0

    def _maybe_empty_lines_for_class_or_def(
        self, current_line: Line, before: int
    ) -> Tuple[int, int]:
        if not current_line.is_decorator:
            self.previous_defs.append(current_line.depth)
        if self.previous_line is None:
            # Don't insert empty lines before the first line in the file.
            return 0, 0

        if self.previous_line.is_decorator:
            return 0, 0

        if self.previous_line.depth < current_line.depth and (
            self.previous_line.is_class or self.previous_line.is_def
        ):
            return 0, 0

        if (
            self.previous_line.is_comment
            and self.previous_line.depth == current_line.depth
            and before == 0
        ):
            return 0, 0

        if self.is_pyi:
            if self.previous_line.depth > current_line.depth:
                newlines = 1
            elif current_line.is_class or self.previous_line.is_class:
                if current_line.is_stub_class and self.previous_line.is_stub_class:
                    # No blank line between classes with an empty body
                    newlines = 0
                else:
                    newlines = 1
            elif current_line.is_def and not self.previous_line.is_def:
                # Blank line between a block of functions and a block of non-functions
                newlines = 1
            else:
                newlines = 0
        else:
            newlines = 2
        if current_line.depth and newlines:
            newlines -= 1
        return newlines, 0
1650
1651
1652 @dataclass
1653 class LineGenerator(Visitor[Line]):
1654 """Generates reformatted Line objects. Empty lines are not emitted.
1655
1656 Note: destroys the tree it's visiting by mutating prefixes of its leaves
1657 in ways that will no longer stringify to valid Python code on the tree.
1658 """
1659
1660 is_pyi: bool = False
1661 normalize_strings: bool = True
1662 current_line: Line = Factory(Line)
1663 remove_u_prefix: bool = False
1664
1665 def line(self, indent: int = 0) -> Iterator[Line]:
1666 """Generate a line.
1667
1668 If the line is empty, only emit if it makes sense.
1669 If the line is too long, split it first and then generate.
1670
1671 If any lines were generated, set up a new current_line.
1672 """
1673 if not self.current_line:
1674 self.current_line.depth += indent
1675 return # Line is empty, don't emit. Creating a new one unnecessary.
1676
1677 complete_line = self.current_line
1678 self.current_line = Line(depth=complete_line.depth + indent)
1679 yield complete_line
1680
1681 def visit_default(self, node: LN) -> Iterator[Line]:
1682 """Default `visit_*()` implementation. Recurses to children of `node`."""
1683 if isinstance(node, Leaf):
1684 any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
1685 for comment in generate_comments(node):
1686 if any_open_brackets:
1687 # any comment within brackets is subject to splitting
1688 self.current_line.append(comment)
1689 elif comment.type == token.COMMENT:
1690 # regular trailing comment
1691 self.current_line.append(comment)
1692 yield from self.line()
1693
1694 else:
1695 # regular standalone comment
1696 yield from self.line()
1697
1698 self.current_line.append(comment)
1699 yield from self.line()
1700
1701 normalize_prefix(node, inside_brackets=any_open_brackets)
1702 if self.normalize_strings and node.type == token.STRING:
1703 normalize_string_prefix(node, remove_u_prefix=self.remove_u_prefix)
1704 normalize_string_quotes(node)
1705 if node.type == token.NUMBER:
1706 normalize_numeric_literal(node)
1707 if node.type not in WHITESPACE:
1708 self.current_line.append(node)
1709 yield from super().visit_default(node)
1710
1711 def visit_atom(self, node: Node) -> Iterator[Line]:
1712 # Always make parentheses invisible around a single node, because it should
1713 # not be needed (except in the case of yield, where removing the parentheses
1714 # produces a SyntaxError).
1715 if (
1716 len(node.children) == 3
1717 and isinstance(node.children[0], Leaf)
1718 and node.children[0].type == token.LPAR
1719 and isinstance(node.children[2], Leaf)
1720 and node.children[2].type == token.RPAR
1721 and isinstance(node.children[1], Leaf)
1722 and not (
1723 node.children[1].type == token.NAME
1724 and node.children[1].value == "yield"
1725 )
1726 ):
1727 node.children[0].value = ""
1728 node.children[2].value = ""
1729 yield from super().visit_default(node)
1730
1731 def visit_factor(self, node: Node) -> Iterator[Line]:
1732 """Force parentheses between a unary op and a binary power:
1733
1734 -2 ** 8 -> -(2 ** 8)
1735 """
1736 child = node.children[1]
1737 if child.type == syms.power and len(child.children) == 3:
1738 lpar = Leaf(token.LPAR, "(")
1739 rpar = Leaf(token.RPAR, ")")
1740 index = child.remove() or 0
1741 node.insert_child(index, Node(syms.atom, [lpar, child, rpar]))
1742 yield from self.visit_default(node)
1743
1744 def visit_INDENT(self, node: Node) -> Iterator[Line]:
1745 """Increase indentation level, maybe yield a line."""
1746 # In blib2to3 INDENT never holds comments.
1747 yield from self.line(+1)
1748 yield from self.visit_default(node)
1749
1750 def visit_DEDENT(self, node: Node) -> Iterator[Line]:
1751 """Decrease indentation level, maybe yield a line."""
1752 # The current line might still wait for trailing comments. At DEDENT time
1753 # there won't be any (they would be prefixes on the preceding NEWLINE).
1754 # Emit the line then.
1755 yield from self.line()
1756
1757 # While DEDENT has no value, its prefix may contain standalone comments
1758 # that belong to the current indentation level. Get 'em.
1759 yield from self.visit_default(node)
1760
1761 # Finally, emit the dedent.
1762 yield from self.line(-1)
1763
1764 def visit_stmt(
1765 self, node: Node, keywords: Set[str], parens: Set[str]
1766 ) -> Iterator[Line]:
1767 """Visit a statement.
1768
1769 This implementation is shared for `if`, `while`, `for`, `try`, `except`,
1770 `def`, `with`, `class`, `assert` and assignments.
1771
1772 The relevant Python language `keywords` for a given statement will be
1773 NAME leaves within it. This methods puts those on a separate line.
1774
1775 `parens` holds a set of string leaf values immediately after which
1776 invisible parens should be put.
1777 """
1778 normalize_invisible_parens(node, parens_after=parens)
1779 for child in node.children:
1780 if child.type == token.NAME and child.value in keywords: # type: ignore
1781 yield from self.line()
1782
1783 yield from self.visit(child)
1784
1785 def visit_suite(self, node: Node) -> Iterator[Line]:
1786 """Visit a suite."""
1787 if self.is_pyi and is_stub_suite(node):
1788 yield from self.visit(node.children[2])
1789 else:
1790 yield from self.visit_default(node)
1791
1792 def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
1793 """Visit a statement without nested statements."""
1794 is_suite_like = node.parent and node.parent.type in STATEMENT
1795 if is_suite_like:
1796 if self.is_pyi and is_stub_body(node):
1797 yield from self.visit_default(node)
1798 else:
1799 yield from self.line(+1)
1800 yield from self.visit_default(node)
1801 yield from self.line(-1)
1802
1803 else:
1804 if not self.is_pyi or not node.parent or not is_stub_suite(node.parent):
1805 yield from self.line()
1806 yield from self.visit_default(node)
1807
1808 def visit_async_stmt(self, node: Node) -> Iterator[Line]:
1809 """Visit `async def`, `async for`, `async with`."""
1810 yield from self.line()
1811
1812 children = iter(node.children)
1813 for child in children:
1814 yield from self.visit(child)
1815
1816 if child.type == token.ASYNC:
1817 break
1818
1819 internal_stmt = next(children)
1820 for child in internal_stmt.children:
1821 yield from self.visit(child)
1822
1823 def visit_decorators(self, node: Node) -> Iterator[Line]:
1824 """Visit decorators."""
1825 for child in node.children:
1826 yield from self.line()
1827 yield from self.visit(child)
1828
1829 def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
1830 """Remove a semicolon and put the other statement on a separate line."""
1831 yield from self.line()
1832
1833 def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
1834 """End of file. Process outstanding comments and end with a newline."""
1835 yield from self.visit_default(leaf)
1836 yield from self.line()
1837
1838 def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
1839 if not self.current_line.bracket_tracker.any_open_brackets():
1840 yield from self.line()
1841 yield from self.visit_default(leaf)
1842
1843 def __attrs_post_init__(self) -> None:
1844 """You are in a twisty little maze of passages."""
1845 v = self.visit_stmt
1846 Ø: Set[str] = set()
1847 self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
1848 self.visit_if_stmt = partial(
1849 v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
1850 )
1851 self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
1852 self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
1853 self.visit_try_stmt = partial(
1854 v, keywords={"try", "except", "else", "finally"}, parens=Ø
1855 )
1856 self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
1857 self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
1858 self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
1859 self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
1860 self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
1861 self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
1862 self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
1863 self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
1864 self.visit_async_funcdef = self.visit_async_stmt
1865 self.visit_decorated = self.visit_decorators
1866
1867
# Node types that form a tuple without explicit surrounding brackets.
IMPLICIT_TUPLE = {syms.testlist, syms.testlist_star_expr, syms.exprlist}
# Maps every opening bracket token to its matching closing bracket token.
BRACKET = {token.LPAR: token.RPAR, token.LSQB: token.RSQB, token.LBRACE: token.RBRACE}
OPENING_BRACKETS = set(BRACKET.keys())
CLOSING_BRACKETS = set(BRACKET.values())
BRACKETS = OPENING_BRACKETS | CLOSING_BRACKETS
# Token types that never get whitespace inserted before them.
ALWAYS_NO_SPACE = CLOSING_BRACKETS | {token.COMMA, STANDALONE_COMMENT}
1874
1875
def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str:  # noqa: C901
    """Return whitespace prefix if needed for the given `leaf`.

    `complex_subscript` signals whether the given leaf is part of a subscription
    which has non-trivial arguments, like arithmetic expressions or function calls.
    """
    NO = ""
    SPACE = " "
    DOUBLESPACE = "  "
    t = leaf.type
    p = leaf.parent
    v = leaf.value
    if t in ALWAYS_NO_SPACE:
        return NO

    if t == token.COMMENT:
        return DOUBLESPACE

    assert p is not None, f"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}"
    if t == token.COLON and p.type not in {
        syms.subscript,
        syms.subscriptlist,
        syms.sliceop,
    }:
        return NO

    prev = leaf.prev_sibling
    if not prev:
        # `leaf` is the first child of its parent: decide based on the leaf
        # that precedes the parent in the whole tree.
        prevp = preceding_leaf(p)
        if not prevp or prevp.type in OPENING_BRACKETS:
            return NO

        if t == token.COLON:
            if prevp.type == token.COLON:
                return NO

            elif prevp.type != token.COMMA and not complex_subscript:
                return NO

            return SPACE

        if prevp.type == token.EQUAL:
            if prevp.parent:
                if prevp.parent.type in {
                    syms.arglist,
                    syms.argument,
                    syms.parameters,
                    syms.varargslist,
                }:
                    return NO

                elif prevp.parent.type == syms.typedargslist:
                    # A bit hacky: if the equal sign has whitespace, it means we
                    # previously found it's a typed argument.  So, we're using
                    # that, too.
                    return prevp.prefix

        elif prevp.type in VARARGS_SPECIALS:
            if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS):
                return NO

        elif prevp.type == token.COLON:
            if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}:
                return SPACE if complex_subscript else NO

        elif (
            prevp.parent
            and prevp.parent.type == syms.factor
            and prevp.type in MATH_OPERATORS
        ):
            # No space between a unary operator and its operand.
            return NO

        elif (
            prevp.type == token.RIGHTSHIFT
            and prevp.parent
            and prevp.parent.type == syms.shift_expr
            and prevp.prev_sibling
            and prevp.prev_sibling.type == token.NAME
            and prevp.prev_sibling.value == "print"  # type: ignore
        ):
            # Python 2 print chevron
            return NO

    elif prev.type in OPENING_BRACKETS:
        return NO

    # From here on, dispatch on the type of the parent node.
    if p.type in {syms.parameters, syms.arglist}:
        # untyped function signatures or calls
        if not prev or prev.type != token.COMMA:
            return NO

    elif p.type == syms.varargslist:
        # lambdas
        if prev and prev.type != token.COMMA:
            return NO

    elif p.type == syms.typedargslist:
        # typed function signatures
        if not prev:
            return NO

        if t == token.EQUAL:
            if prev.type != syms.tname:
                return NO

        elif prev.type == token.EQUAL:
            # A bit hacky: if the equal sign has whitespace, it means we
            # previously found it's a typed argument.  So, we're using that, too.
            return prev.prefix

        elif prev.type != token.COMMA:
            return NO

    elif p.type == syms.tname:
        # type names
        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type != token.COMMA:
                return NO

    elif p.type == syms.trailer:
        # attributes and calls
        if t == token.LPAR or t == token.RPAR:
            return NO

        if not prev:
            if t == token.DOT:
                prevp = preceding_leaf(p)
                if not prevp or prevp.type != token.NUMBER:
                    return NO

            elif t == token.LSQB:
                return NO

        elif prev.type != token.COMMA:
            return NO

    elif p.type == syms.argument:
        # single argument
        if t == token.EQUAL:
            return NO

        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type == token.LPAR:
                return NO

        elif prev.type in {token.EQUAL} | VARARGS_SPECIALS:
            return NO

    elif p.type == syms.decorator:
        # decorators
        return NO

    elif p.type == syms.dotted_name:
        if prev:
            return NO

        prevp = preceding_leaf(p)
        if not prevp or prevp.type == token.AT or prevp.type == token.DOT:
            return NO

    elif p.type == syms.classdef:
        if t == token.LPAR:
            return NO

        if prev and prev.type == token.LPAR:
            return NO

    elif p.type in {syms.subscript, syms.sliceop}:
        # indexing
        if not prev:
            assert p.parent is not None, "subscripts are always parented"
            if p.parent.type == syms.subscriptlist:
                return SPACE

            return NO

        elif not complex_subscript:
            return NO

    elif p.type == syms.atom:
        if prev and t == token.DOT:
            # dots, but not the first one.
            return NO

    elif p.type == syms.dictsetmaker:
        # dict unpacking
        if prev and prev.type == token.DOUBLESTAR:
            return NO

    elif p.type in {syms.factor, syms.star_expr}:
        # unary ops
        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type in OPENING_BRACKETS:
                return NO

            prevp_parent = prevp.parent
            assert prevp_parent is not None
            if prevp.type == token.COLON and prevp_parent.type in {
                syms.subscript,
                syms.sliceop,
            }:
                return NO

            elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument:
                return NO

        elif t in {token.NAME, token.NUMBER, token.STRING}:
            return NO

    elif p.type == syms.import_from:
        if t == token.DOT:
            if prev and prev.type == token.DOT:
                return NO

        elif t == token.NAME:
            if v == "import":
                return SPACE

            if prev and prev.type == token.DOT:
                return NO

    elif p.type == syms.sliceop:
        return NO

    return SPACE
2104
2105
def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]:
    """Return the first leaf that precedes `node`, if any."""
    current = node
    while current:
        sibling = current.prev_sibling
        if sibling:
            if isinstance(sibling, Leaf):
                return sibling

            # The preceding sibling is an inner node; its last leaf is the
            # one right before `node`.
            try:
                return list(sibling.leaves())[-1]
            except IndexError:
                # The sibling contains no leaves at all.
                return None

        # No sibling on this level: climb up and look again.
        current = current.parent
    return None
2122
2123
def child_towards(ancestor: Node, descendant: LN) -> Optional[LN]:
    """Return the child of `ancestor` that contains `descendant`."""
    current: Optional[LN] = descendant
    # Climb until the parent is `ancestor` (or the root is passed).
    while current is not None and current.parent != ancestor:
        current = current.parent
    return current
2130
2131
def container_of(leaf: Leaf) -> LN:
    """Return `leaf` or one of its ancestors that is the topmost container of it.

    By "container" we mean a node where `leaf` is the very first child.
    """
    same_prefix = leaf.prefix
    container: LN = leaf
    while container:
        parent = container.parent
        stop_climbing = (
            parent is None
            # `leaf` is no longer the very first child up there.
            or parent.children[0].prefix != same_prefix
            # Never report the whole file as the container.
            or parent.type == syms.file_input
            # Don't cross a bracket that directly precedes the parent.
            or (parent.prev_sibling is not None and parent.prev_sibling.type in BRACKETS)
        )
        if stop_climbing:
            break

        container = parent
    return container
2155
2156
def is_split_after_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority:
    """Return the priority of the `leaf` delimiter, given a line break after it.

    The delimiter priorities returned here are from those delimiters that would
    cause a line break after themselves.

    Higher numbers are higher priority.
    """
    # The comma is the only delimiter that triggers a break after itself.
    return COMMA_PRIORITY if leaf.type == token.COMMA else 0
2169
2170
def is_split_before_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority:
    """Return the priority of the `leaf` delimiter, given a line break before it.

    The delimiter priorities returned here are from those delimiters that would
    cause a line break before themselves.

    Higher numbers are higher priority.

    `previous` is the leaf immediately before `leaf` on the same line, used
    to disambiguate multi-word operators like ``not in`` and ``is not``.
    """
    if is_vararg(leaf, within=VARARGS_PARENTS | UNPACKING_PARENTS):
        # * and ** might also be MATH_OPERATORS but in this case they are not.
        # Don't treat them as a delimiter.
        return 0

    if (
        leaf.type == token.DOT
        and leaf.parent
        and leaf.parent.type not in {syms.import_from, syms.dotted_name}
        and (previous is None or previous.type in CLOSING_BRACKETS)
    ):
        return DOT_PRIORITY

    if (
        leaf.type in MATH_OPERATORS
        and leaf.parent
        and leaf.parent.type not in {syms.factor, syms.star_expr}
    ):
        return MATH_PRIORITIES[leaf.type]

    if leaf.type in COMPARATORS:
        return COMPARATOR_PRIORITY

    if (
        leaf.type == token.STRING
        and previous is not None
        and previous.type == token.STRING
    ):
        # Implicit string concatenation.
        return STRING_PRIORITY

    if leaf.type not in {token.NAME, token.ASYNC}:
        return 0

    if (
        leaf.value == "for"
        and leaf.parent
        and leaf.parent.type in {syms.comp_for, syms.old_comp_for}
        or leaf.type == token.ASYNC
    ):
        # The `for` of a comprehension (or a bare `async`) -- but not the
        # `for` in `async for`, where the preceding `async` took priority.
        if (
            not isinstance(leaf.prev_sibling, Leaf)
            or leaf.prev_sibling.value != "async"
        ):
            return COMPREHENSION_PRIORITY

    if (
        leaf.value == "if"
        and leaf.parent
        and leaf.parent.type in {syms.comp_if, syms.old_comp_if}
    ):
        return COMPREHENSION_PRIORITY

    if leaf.value in {"if", "else"} and leaf.parent and leaf.parent.type == syms.test:
        return TERNARY_PRIORITY

    if leaf.value == "is":
        return COMPARATOR_PRIORITY

    if (
        leaf.value == "in"
        and leaf.parent
        and leaf.parent.type in {syms.comp_op, syms.comparison}
        and not (
            previous is not None
            and previous.type == token.NAME
            and previous.value == "not"
        )
    ):
        # `in`, but not the second word of `not in` (that one breaks on `not`).
        return COMPARATOR_PRIORITY

    if (
        leaf.value == "not"
        and leaf.parent
        and leaf.parent.type == syms.comp_op
        and not (
            previous is not None
            and previous.type == token.NAME
            and previous.value == "is"
        )
    ):
        # `not in`, but not the second word of `is not` (breaks on `is`).
        return COMPARATOR_PRIORITY

    if leaf.value in LOGIC_OPERATORS and leaf.parent:
        return LOGIC_PRIORITY

    return 0
2265
2266
# Comment values that switch formatting off/on for the region they enclose.
# The yapf spellings are honored for compatibility with existing code bases.
FMT_OFF = {"# fmt: off", "# fmt:off", "# yapf: disable"}
FMT_ON = {"# fmt: on", "# fmt:on", "# yapf: enable"}
2269
2270
def generate_comments(leaf: LN) -> Iterator[Leaf]:
    """Clean the prefix of the `leaf` and generate comments from it, if any.

    lib2to3 stores comments in the whitespace prefix of the following token
    (see `pgen2/driver.py:Driver.parse_tokens()`), a clever move that avoids
    teaching the grammar every place a comment may appear.  The flip side is
    that comments don't "belong" anywhere, which is why this generator yields
    simple parentless Leaf objects -- the correct parent is unknowable.

    We only need to tell inline comments from standalone ones (those that
    don't share a line with code): inline comments come out as regular
    token.COMMENT leaves while standalone comments are emitted with the fake
    STANDALONE_COMMENT token identifier.
    """
    is_endmarker = leaf.type == token.ENDMARKER
    for proto in list_comments(leaf.prefix, is_endmarker=is_endmarker):
        yield Leaf(proto.type, proto.value, prefix="\n" * proto.newlines)
2292
2293
@dataclass
class ProtoComment:
    """Describes a piece of syntax that is a comment.

    It's not a :class:`blib2to3.pytree.Leaf` so that:

    * it can be cached (`Leaf` objects should not be reused more than once as
      they store their lineno, column, prefix, and parent information);
    * `newlines` and `consumed` fields are kept separate from the `value`. This
      simplifies handling of special marker comments like ``# fmt: off/on``.
    """

    type: int  # token.COMMENT or STANDALONE_COMMENT
    value: str  # content of the comment, including the leading "#"
    newlines: int  # how many newlines before the comment
    consumed: int  # how many characters of the original leaf's prefix did we consume
2310
2311
# Cached because the same prefix is commonly examined multiple times; the
# result is a list of immutable-by-convention ProtoComment records.
@lru_cache(maxsize=4096)
def list_comments(prefix: str, *, is_endmarker: bool) -> List[ProtoComment]:
    """Return a list of :class:`ProtoComment` objects parsed from the given `prefix`."""
    result: List[ProtoComment] = []
    if not prefix or "#" not in prefix:
        return result

    consumed = 0
    nlines = 0
    ignored_lines = 0
    for index, line in enumerate(prefix.split("\n")):
        consumed += len(line) + 1  # adding the length of the split '\n'
        line = line.lstrip()
        if not line:
            nlines += 1
        if not line.startswith("#"):
            # Escaped newlines outside of a comment are not really newlines at
            # all. We treat a single-line comment following an escaped newline
            # as a simple trailing comment.
            if line.endswith("\\"):
                ignored_lines += 1
            continue

        # The first comment line (skipping backslash-continuation lines) still
        # shares its line with code, making it a trailing comment -- except at
        # the end marker, where every comment is standalone.
        if index == ignored_lines and not is_endmarker:
            comment_type = token.COMMENT  # simple trailing comment
        else:
            comment_type = STANDALONE_COMMENT
        comment = make_comment(line)
        result.append(
            ProtoComment(
                type=comment_type, value=comment, newlines=nlines, consumed=consumed
            )
        )
        nlines = 0
    return result
2347
2348
def make_comment(content: str) -> str:
    """Return a consistently formatted comment from the given `content` string.

    All comments (except for "##", "#!", "#:", '#'", "#%%") should have a single
    space between the hash sign and the content.

    If `content` didn't start with a hash sign, one is provided.
    """
    body = content.rstrip()
    if not body:
        # Whitespace-only content collapses to a bare hash sign.
        return "#"

    if body.startswith("#"):
        body = body[1:]
    # Shebangs, directives and the like keep their original spacing.
    needs_space = body and body[0] not in " !:#'%"
    if needs_space:
        body = " " + body
    return "#" + body
2366
2367
def split_line(
    line: Line,
    line_length: int,
    inner: bool = False,
    features: Collection[Feature] = (),
) -> Iterator[Line]:
    """Split a `line` into potentially many lines.

    They should fit in the allotted `line_length` but might not be able to.
    `inner` signifies that there were a pair of brackets somewhere around the
    current `line`, possibly transitively. This means we can fallback to splitting
    by delimiters if the LHS/RHS don't yield any results.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = str(line).strip("\n")

    # A line that already fits, carries no split-forcing constructs (magic
    # trailing comma, uncollapsable type comments), passes through unchanged.
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_explode
        and not line.is_collection_with_optional_trailing_comma
        and (
            is_line_short_enough(line, line_length=line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
    ):
        yield line
        return

    split_funcs: List[SplitFunc]
    if line.is_def:
        split_funcs = [left_hand_split]
    else:

        def rhs(line: Line, features: Collection[Feature]) -> Iterator[Line]:
            # Try progressively smaller sets of omitted trailers, keeping the
            # first split whose head fits within the line length.
            for omit in generate_trailers_to_omit(line, line_length):
                lines = list(right_hand_split(line, line_length, features, omit=omit))
                if is_line_short_enough(lines[0], line_length=line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line.
            yield from right_hand_split(line, line_length, features=features)

        if line.inside_brackets:
            split_funcs = [delimiter_split, standalone_comment_split, rhs]
        else:
            split_funcs = [rhs]
    for split_func in split_funcs:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        result: List[Line] = []
        try:
            for l in split_func(line, features):
                if str(l).strip("\n") == line_str:
                    raise CannotSplit("Split function returned an unchanged result")

                # Recurse: each produced line may need further splitting.
                result.extend(
                    split_line(
                        l, line_length=line_length, inner=True, features=features
                    )
                )
        except CannotSplit:
            continue

        else:
            yield from result
            break

    else:
        # No split function succeeded; emit the line unchanged.
        yield line
2446
2447
def left_hand_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise. This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket = None
    # Scan left to right: head is everything up to and including the first
    # opening bracket, body is that bracket's interior, tail is the rest.
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result
2482
2483
def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket = None
    closing_bracket = None
    # Scan right to left: tail is everything after the last non-omitted
    # closing bracket, body is that bracket's interior, head is the rest.
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail. Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    # The leaves were gathered right-to-left; restore source order.
    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        # the body shouldn't be exploded
        not body.should_explode
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length)
    ):
        # Retry the split with the optional parentheses omitted.
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                )

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to "
                    "satisfy the splitting algorithm because the head or the tail "
                    "contains multiline strings which by definition never fit one "
                    "line."
                )

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result
2571
2572
def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    # A non-empty body means the split produced real work; accept it.
    if body:
        return

    tail_len = len(str(tail).strip())
    if tail_len == 0:
        raise CannotSplit("Splitting brackets produced the same line")

    if tail_len < 3:
        raise CannotSplit(
            f"Splitting brackets on an empty body to save "
            f"{tail_len} characters is not worth it"
        )
2597
2598
def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.

    Note: mutates `leaves` (prefix normalization, possible comma insertion).
    """
    result = Line(depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments.
            no_commas = original.is_def and not any(
                l.type == token.COMMA for l in leaves
            )

            if original.is_import or no_commas:
                # Walk backwards past trailing standalone comments to find
                # where the comma belongs.
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue
                    elif leaves[i].type == token.COMMA:
                        break
                    else:
                        leaves.insert(i + 1, Leaf(token.COMMA, ","))
                        break
    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body:
        result.should_explode = should_explode(result, opening_bracket)
    return result
2637
2638
def dont_increase_indentation(split_func: SplitFunc) -> SplitFunc:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for produced in split_func(line, features):
            # Inside brackets the extra indentation is implied by the line's
            # depth, so strip whatever whitespace the split left behind.
            normalize_prefix(produced.leaves[0], inside_brackets=True)
            yield produced

    return split_wrapper
2652
2653
@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty")

    bt = line.bracket_tracker
    try:
        # The trailing leaf is excluded: breaking after it would be pointless.
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found")

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        # A trailing comma is only safe when any `*`/`**` at the outermost
        # bracket depth is allowed to precede it by the target grammar.
        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        # Flush the accumulated line after each highest-priority delimiter.
        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            current_line.append(Leaf(token.COMMA, ","))
        yield current_line
2722
2723
@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            # The leaf can't join this line: flush it and begin a fresh line
            # at the same depth, seeded with the leaf.
            yield current_line

            current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for trailing_comment in line.comments_after(leaf):
            yield from append_to_line(trailing_comment)

    if current_line:
        yield current_line
2753
2754
def is_import(leaf: Leaf) -> bool:
    """Return True if the given leaf starts an import statement."""
    parent = leaf.parent
    if leaf.type != token.NAME or parent is None:
        return False

    # `import x` statements parse as import_name, `from x import y` as
    # import_from; the keyword leaf must sit directly under that node.
    if leaf.value == "import":
        return parent.type == syms.import_name

    if leaf.value == "from":
        return parent.type == syms.import_from

    return False
2767
2768
def is_type_comment(leaf: Leaf, suffix: str = "") -> bool:
    """Return True if the given leaf is a special comment.
    Only returns true for type comments for now.

    `suffix` narrows the match, e.g. suffix=": ignore" only matches
    `# type: ignore` comments.
    """
    t = leaf.type
    v = leaf.value
    # Bug fix: the set previously contained the *boolean* expression
    # `t == STANDALONE_COMMENT` instead of the STANDALONE_COMMENT constant,
    # so standalone type comments were never recognized.
    return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith(
        "# type:" + suffix
    )
2777
2778
def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if inside_brackets:
        leaf.prefix = ""
        return

    parts = leaf.prefix.split("#")
    if "\\" in parts[0]:
        # A backslash before any comment: wipe the prefix entirely.
        leaf.prefix = ""
        return

    newline_count = parts[-1].count("\n")
    if len(parts) > 1:
        # The newline terminating the last comment line doesn't count.
        newline_count -= 1
    leaf.prefix = "\n" * newline_count
2795
2796
def normalize_string_prefix(leaf: Leaf, remove_u_prefix: bool = False) -> None:
    """Make all string prefixes lowercase.

    If remove_u_prefix is given, also removes any u prefix from the string.

    Note: Mutates its argument.
    """
    # Group 1 captures the (possibly empty) prefix, group 2 the rest.
    m = re.match(r"^([furbFURB]*)(.*)$", leaf.value, re.DOTALL)
    assert m is not None, f"failed to match string {leaf.value!r}"
    prefix = m.group(1).lower()
    if remove_u_prefix:
        prefix = prefix.replace("u", "")
    leaf.value = f"{prefix}{m.group(2)}"
2811
2812
def normalize_string_quotes(leaf: Leaf) -> None:
    """Prefer double quotes but only if it doesn't cause more escaping.

    Adds or removes backslashes as appropriate. Doesn't parse and fix
    strings nested in f-strings (yet).

    Note: Mutates its argument.
    """
    # Skip the (possibly mixed-case) string prefix to inspect the quote style.
    value = leaf.value.lstrip("furbFURB")
    if value[:3] == '"""':
        return

    elif value[:3] == "'''":
        orig_quote = "'''"
        new_quote = '"""'
    elif value[0] == '"':
        orig_quote = '"'
        new_quote = "'"
    else:
        orig_quote = "'"
        new_quote = '"'
    first_quote_pos = leaf.value.find(orig_quote)
    if first_quote_pos == -1:
        return  # There's an internal error

    prefix = leaf.value[:first_quote_pos]
    # A new_quote preceded by an even number of backslashes, i.e. not escaped.
    unescaped_new_quote = re.compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
    escaped_new_quote = re.compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}")
    escaped_orig_quote = re.compile(rf"([^\\]|^)\\((?:\\\\)*){orig_quote}")
    body = leaf.value[first_quote_pos + len(orig_quote) : -len(orig_quote)]
    if "r" in prefix.casefold():
        if unescaped_new_quote.search(body):
            # There's at least one unescaped new_quote in this raw string
            # so converting is impossible
            return

        # Do not introduce or remove backslashes in raw strings
        new_body = body
    else:
        # remove unnecessary escapes
        # (sub_twice handles overlapping matches a single re.sub would miss)
        new_body = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", body)
        if body != new_body:
            # Consider the string without unnecessary escapes as the original
            body = new_body
            leaf.value = f"{prefix}{orig_quote}{body}{orig_quote}"
        new_body = sub_twice(escaped_orig_quote, rf"\1\2{orig_quote}", new_body)
        new_body = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_body)
    if "f" in prefix.casefold():
        # f-strings: look inside interpolation braces for backslashes.
        matches = re.findall(
            r"""
            (?:[^{]|^)\{ # start of the string or a non-{ followed by a single {
            ([^{].*?) # contents of the brackets except if begins with {{
            \}(?:[^}]|$) # A } followed by end of the string or a non-}
            """,
            new_body,
            re.VERBOSE,
        )
        for m in matches:
            if "\\" in str(m):
                # Do not introduce backslashes in interpolated expressions
                return
    if new_quote == '"""' and new_body[-1:] == '"':
        # edge case: a body ending in '"' would fuse with the closing triple
        # quote, so escape that last quote.
        new_body = new_body[:-1] + '\\"'
    orig_escape_count = body.count("\\")
    new_escape_count = new_body.count("\\")
    if new_escape_count > orig_escape_count:
        return  # Do not introduce more escaping

    if new_escape_count == orig_escape_count and orig_quote == '"':
        return  # Prefer double quotes

    leaf.value = f"{prefix}{new_quote}{new_body}{new_quote}"
2886
2887
def normalize_numeric_literal(leaf: Leaf) -> None:
    """Normalizes numeric (float, int, and complex) literals.

    All letters used in the representation are normalized to lowercase (except
    in Python 2 long literals).
    """
    text = leaf.value.lower()

    if text.startswith(("0o", "0b")):
        # Octal and binary literals are only lowercased, nothing else.
        leaf.value = text
        return

    if text.startswith("0x"):
        # Hex digits become uppercase while the "0x" marker stays lowercase.
        leaf.value = "0x" + text[2:].upper()
        return

    if "e" in text:
        # Scientific notation: normalize mantissa and drop an explicit "+".
        mantissa, exponent = text.split("e")
        sign = ""
        if exponent.startswith("-"):
            sign = "-"
            exponent = exponent[1:]
        elif exponent.startswith("+"):
            exponent = exponent[1:]
        leaf.value = f"{format_float_or_int_string(mantissa)}e{sign}{exponent}"
        return

    if text.endswith(("j", "l")):
        stem, suffix = text[:-1], text[-1]
        # Capitalize in "2L" because "l" looks too similar to "1".
        if suffix == "l":
            suffix = "L"
        leaf.value = f"{format_float_or_int_string(stem)}{suffix}"
        return

    leaf.value = format_float_or_int_string(text)
2922
2923
def format_float_or_int_string(text: str) -> str:
    """Formats a float string like "1.0".

    Missing integer or fractional parts are padded with a 0; strings
    without a dot are returned unchanged.
    """
    if "." not in text:
        return text

    integer_part, fraction_part = text.split(".")
    return f"{integer_part or 0}.{fraction_part or 0}"
2931
2932
def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.

    Note: mutates `node` in place.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            # Walrus assignments keep their parentheses untouched.
            if is_walrus_assignment(child):
                continue
            if child.type == syms.atom:
                # Determines if the underlying atom should be surrounded with
                # invisible params - also makes parens invisible recursively
                # within the atom and removes repeated invisible parens within
                # the atom
                should_surround_with_parens = maybe_make_parens_invisible_in_atom(
                    child, parent=node
                )

                if should_surround_with_parens:
                    lpar = Leaf(token.LPAR, "")
                    rpar = Leaf(token.RPAR, "")
                    index = child.remove() or 0
                    node.insert_child(index, Node(syms.atom, [lpar, child, rpar]))
            elif is_one_tuple(child):
                # wrap child in visible parentheses
                lpar = Leaf(token.LPAR, "(")
                rpar = Leaf(token.RPAR, ")")
                child.remove()
                node.insert_child(index, Node(syms.atom, [lpar, child, rpar]))
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if child.type == token.LPAR:
                    # make parentheses invisible
                    child.value = ""  # type: ignore
                    node.children[-1].value = ""  # type: ignore
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                # Only the first name after `import` needs handling; stop here.
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                # wrap child in invisible parentheses
                lpar = Leaf(token.LPAR, "")
                rpar = Leaf(token.RPAR, "")
                index = child.remove() or 0
                prefix = child.prefix
                child.prefix = ""
                new_child = Node(syms.atom, [lpar, child, rpar])
                new_child.prefix = prefix
                node.insert_child(index, new_child)

        # Parens are only normalized directly after one of `parens_after`.
        check_lpar = isinstance(child, Leaf) and child.value in parens_after
3005
3006
def normalize_fmt_off(node: Node) -> None:
    """Convert content between `# fmt: off`/`# fmt: on` into standalone comments."""
    # Each call converts at most one pair; repeat until nothing changes.
    while convert_one_fmt_off_pair(node):
        pass
3012
3013
def convert_one_fmt_off_pair(node: Node) -> bool:
    """Convert content of a single `# fmt: off`/`# fmt: on` into a standalone comment.

    Returns True if a pair was converted.
    """
    for leaf in node.leaves():
        previous_consumed = 0
        for comment in list_comments(leaf.prefix, is_endmarker=False):
            if comment.value in FMT_OFF:
                # We only want standalone comments. If there's no previous leaf or
                # the previous leaf is indentation, it's a standalone comment in
                # disguise.
                if comment.type != STANDALONE_COMMENT:
                    prev = preceding_leaf(leaf)
                    if prev and prev.type not in WHITESPACE:
                        continue

                ignored_nodes = list(generate_ignored_nodes(leaf))
                if not ignored_nodes:
                    continue

                first = ignored_nodes[0]  # Can be a container node with the `leaf`.
                parent = first.parent
                prefix = first.prefix
                # The prefix text the `# fmt: off` comment consumed moves into
                # the standalone comment created below.
                first.prefix = prefix[comment.consumed :]
                hidden_value = (
                    comment.value + "\n" + "".join(str(n) for n in ignored_nodes)
                )
                if hidden_value.endswith("\n"):
                    # That happens when one of the `ignored_nodes` ended with a NEWLINE
                    # leaf (possibly followed by a DEDENT).
                    hidden_value = hidden_value[:-1]
                first_idx = None
                for ignored in ignored_nodes:
                    index = ignored.remove()
                    if first_idx is None:
                        first_idx = index
                assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)"
                assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)"
                # Re-insert all ignored content as a single opaque leaf so the
                # formatter reproduces it verbatim.
                parent.insert_child(
                    first_idx,
                    Leaf(
                        STANDALONE_COMMENT,
                        hidden_value,
                        prefix=prefix[:previous_consumed] + "\n" * comment.newlines,
                    ),
                )
                return True

            previous_consumed = comment.consumed

    return False
3066
3067
def generate_ignored_nodes(leaf: Leaf) -> Iterator[LN]:
    """Starting from the container of `leaf`, generate all leaves until `# fmt: on`.

    Stops at the end of the block.
    """
    node: Optional[LN] = container_of(leaf)
    while node is not None and node.type != token.ENDMARKER:
        # A `# fmt: on` comment in this node's prefix ends the ignored region.
        if any(
            comment.value in FMT_ON
            for comment in list_comments(node.prefix, is_endmarker=False)
        ):
            return

        yield node

        node = node.next_sibling
3082
3083
def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.

    """
    # Empty tuples, one-tuples, bare yields outside expression statements, and
    # atoms with top-level commas must keep their visible parentheses.
    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    first = node.children[0]
    last = node.children[-1]
    if first.type == token.LPAR and last.type == token.RPAR:
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""  # type: ignore
        last.value = ""  # type: ignore
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        # Already parenthesized (now invisibly); no extra wrapping needed.
        return False

    return True
3118
3119
def is_atom_with_invisible_parens(node: LN) -> bool:
    """Given a `LN`, determines whether it's an atom `node` with invisible
    parens. Useful in dedupe-ing and normalizing parens.
    """
    if isinstance(node, Leaf) or node.type != syms.atom:
        return False

    first = node.children[0]
    last = node.children[-1]
    if not (isinstance(first, Leaf) and isinstance(last, Leaf)):
        return False

    # Invisible parens are LPAR/RPAR leaves whose value is the empty string.
    return (
        first.type == token.LPAR
        and first.value == ""
        and last.type == token.RPAR
        and last.value == ""
    )
3136
3137
def is_empty_tuple(node: LN) -> bool:
    """Return True if `node` holds an empty tuple: `()`."""
    if node.type != syms.atom or len(node.children) != 2:
        return False

    lpar, rpar = node.children
    return lpar.type == token.LPAR and rpar.type == token.RPAR
3146
3147
def unwrap_singleton_parenthesis(node: LN) -> Optional[LN]:
    """Returns `wrapped` if `node` is of the shape ( wrapped ).

    Parenthesis can be optional. Returns None otherwise"""
    if len(node.children) != 3:
        return None

    opening, wrapped, closing = node.children
    if opening.type == token.LPAR and closing.type == token.RPAR:
        return wrapped

    return None
3159
3160
def is_one_tuple(node: LN) -> bool:
    """Return True if `node` holds a tuple with one element, with or without parens."""
    if node.type == syms.atom:
        # Parenthesized form: the wrapped node must be `expr ,`.
        inner = unwrap_singleton_parenthesis(node)
        return (
            inner is not None
            and inner.type == syms.testlist_gexp
            and len(inner.children) == 2
            and inner.children[1].type == token.COMMA
        )

    # Unparenthesized form: an implicit tuple whose second child is the comma.
    if node.type not in IMPLICIT_TUPLE or len(node.children) != 2:
        return False

    return node.children[1].type == token.COMMA
3175
3176
def is_walrus_assignment(node: LN) -> bool:
    """Return True iff `node` is of the shape ( test := test )"""
    wrapped = unwrap_singleton_parenthesis(node)
    if wrapped is None:
        return False

    return wrapped.type == syms.namedexpr_test
3181
3182
def is_yield(node: LN) -> bool:
    """Return True if `node` holds a `yield` or `yield from` expression."""
    if node.type == syms.yield_expr:
        return True

    # A bare `yield` keyword on its own.
    if node.type == token.NAME and node.value == "yield":  # type: ignore
        return True

    # Otherwise only a parenthesized atom `( ... )` can hold a yield.
    if node.type != syms.atom or len(node.children) != 3:
        return False

    opening, inner, closing = node.children
    if opening.type != token.LPAR or closing.type != token.RPAR:
        return False

    return is_yield(inner)
3202
3203
def is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool:
    """Return True if `leaf` is a star or double star in a vararg or kwarg.

    If `within` includes VARARGS_PARENTS, this applies to function signatures.
    If `within` includes UNPACKING_PARENTS, it applies to right hand-side
    extended iterable unpacking (PEP 3132) and additional unpacking
    generalizations (PEP 448).
    """
    if leaf.type not in VARARGS_SPECIALS or not leaf.parent:
        return False

    parent = leaf.parent
    if parent.type == syms.star_expr:
        # Star expressions are also used as assignment targets in extended
        # iterable unpacking (PEP 3132); judge by the grandparent instead.
        parent = parent.parent
        if parent is None:
            return False

    return parent.type in within
3225
3226
def is_multiline_string(leaf: Leaf) -> bool:
    """Return True if `leaf` is a multiline string that actually spans many lines."""
    body = leaf.value.lstrip("furbFURB")
    if body[:3] not in {'"""', "'''"}:
        return False

    # Triple-quoted, but only "multiline" when a newline is really present.
    return "\n" in body
3231
3232
def is_stub_suite(node: Node) -> bool:
    """Return True if `node` is a suite with a stub body."""
    # A stub suite parses as: NEWLINE INDENT <body> DEDENT.
    if len(node.children) != 4:
        return False

    newline, indent, body, dedent = node.children
    if (
        newline.type != token.NEWLINE
        or indent.type != token.INDENT
        or dedent.type != token.DEDENT
    ):
        return False

    return is_stub_body(body)
3244
3245
def is_stub_body(node: LN) -> bool:
    """Return True if `node` is a simple statement containing an ellipsis."""
    if not isinstance(node, Node) or node.type != syms.simple_stmt:
        return False

    if len(node.children) != 2:
        return False

    atom = node.children[0]
    if atom.type != syms.atom or len(atom.children) != 3:
        return False

    # An ellipsis is three DOT leaves inside one atom.
    dot = Leaf(token.DOT, ".")
    return all(child == dot for child in atom.children)
3260
3261
def max_delimiter_priority_in_atom(node: LN) -> Priority:
    """Return maximum delimiter priority inside `node`.

    This is specific to atoms with contents contained in a pair of parentheses.
    If `node` isn't an atom or there are no enclosing parentheses, returns 0.
    """
    if node.type != syms.atom:
        return 0

    first, last = node.children[0], node.children[-1]
    if first.type != token.LPAR or last.type != token.RPAR:
        return 0

    # Mark every leaf between the parentheses in a fresh tracker.
    tracker = BracketTracker()
    for child in node.children[1:-1]:
        if isinstance(child, Leaf):
            tracker.mark(child)
        else:
            for leaf in child.leaves():
                tracker.mark(leaf)

    try:
        return tracker.max_delimiter_priority()
    except ValueError:
        # No delimiters at all between the parens.
        return 0
3288
3289
def ensure_visible(leaf: Leaf) -> None:
    """Make sure parentheses are visible.

    They could be invisible as part of some statements (see
    :func:`normalize_invisible_parens` and :func:`visit_import_from`).
    """
    # Restore the canonical text for either kind of paren leaf.
    visible = {token.LPAR: "(", token.RPAR: ")"}
    if leaf.type in visible:
        leaf.value = visible[leaf.type]
3300
3301
def should_explode(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` immediately be split with `delimiter_split()` after RHS?"""
    bracket_parent = opening_bracket.parent
    if not (
        bracket_parent
        and bracket_parent.type in {syms.atom, syms.import_from}
        and opening_bracket.value in "[{("
    ):
        return False

    try:
        trailing = line.leaves[-1]
        # A trailing comma isn't a "real" delimiter here; ignore it.
        ignored = {id(trailing)} if trailing.type == token.COMMA else set()
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=ignored)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY
3320
3321
def get_features_used(node: Node) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    """
    features: Set[Feature] = set()
    for n in node.pre_order():
        n_type = n.type
        if n_type == token.STRING:
            # f-strings are recognized by their (possibly raw) prefix.
            head = n.value[:2]  # type: ignore
            if head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)

        elif n_type == token.NUMBER:
            if "_" in n.value:  # type: ignore
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n_type == token.SLASH:
            # PEP 570 positional-only parameter marker.
            if n.parent and n.parent.type in {syms.typedargslist, syms.arglist}:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n_type == token.COLONEQUAL:
            # PEP 572 walrus operator.
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif (
            n_type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n_type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            # A trailing comma is only "new" when the argument list also
            # contains a * or ** entry (directly or inside an argument node).
            for child in n.children:
                if child.type in STARS:
                    features.add(feature)

                if child.type == syms.argument:
                    for grandchild in child.children:
                        if grandchild.type in STARS:
                            features.add(feature)

    return features
3369
3370
def detect_target_versions(node: Node) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    used = get_features_used(node)
    # A version is a candidate when it supports every feature the file uses.
    return {
        candidate
        for candidate in TargetVersion
        if used <= VERSION_TO_FEATURES[candidate]
    }
3377
3378
def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too). First
    set is empty.
    """

    omit: Set[LeafID] = set()
    yield omit

    # Walk the line right-to-left, accumulating rendered width as we go.
    length = 4 * line.depth
    opening_bracket = None
    closing_bracket = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in enumerate_with_length(line, reversed=True):
        length += leaf_length
        if length > line_length:
            # Everything further left is too wide to fit anyway; stop.
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            # Comments pin their line; omitting brackets can't help here.
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            if index > 0 and line.leaves[index - 1].type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough.
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                # The previously-seen trailer fits; emit it as omittable.
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if leaf.value:
                # A visible closing bracket: the next candidate to omit.
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
3427
3428
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        # Yield the imported names from an import_from statement's children.
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value
            elif child.type == syms.import_as_name:
                # `name as alias`: report the original name, not the alias.
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
            else:
                raise AssertionError("Invalid syntax parsing imports")

    # __future__ imports must appear at the top of the module, possibly after
    # a docstring; stop scanning at the first statement that is neither.
    for child in node.children:
        if child.type != syms.simple_stmt:
            break
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue
            else:
                break
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break
            # Children past `from __future__ import` hold the imported names.
            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break
    return imports
3470
3471
def gen_python_files_in_dir(
    path: Path,
    root: Path,
    include: Pattern[str],
    exclude: Pattern[str],
    report: "Report",
) -> Iterator[Path]:
    """Generate all files under `path` whose paths are not excluded by the
    `exclude` regex, but are included by the `include` regex.

    Symbolic links pointing outside of the `root` directory are ignored.

    `report` is where output about exclusions goes.
    """
    assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
    for child in path.iterdir():
        try:
            # Paths are matched in root-relative, POSIX-style form.
            normalized_path = "/" + child.resolve().relative_to(root).as_posix()
        except ValueError:
            if child.is_symlink():
                report.path_ignored(
                    child, f"is a symbolic link that points outside {root}"
                )
                continue

            raise

        if child.is_dir():
            # The trailing slash lets `exclude` patterns match directories.
            normalized_path += "/"
        exclude_match = exclude.search(normalized_path)
        if exclude_match and exclude_match.group(0):
            # Fix: message was an f-string with no placeholders.
            report.path_ignored(child, "matches the --exclude regular expression")
            continue

        if child.is_dir():
            yield from gen_python_files_in_dir(child, root, include, exclude, report)

        elif child.is_file():
            include_match = include.search(normalized_path)
            if include_match:
                yield child
3513
3514
@lru_cache()
def find_project_root(srcs: Iterable[str]) -> Path:
    """Return a directory containing .git, .hg, or pyproject.toml.

    That directory can be one of the directories passed in `srcs` or their
    common parent.

    If no directory in the tree contains a marker that would specify it's the
    project root, the root of the file system is returned.
    """
    if not srcs:
        return Path("/").resolve()

    base = min(Path(src).resolve() for src in srcs)
    if base.is_dir():
        # Append a fake file so `parents` below includes `base` itself.
        base /= "fake-file"

    for directory in base.parents:
        # Check the project-root markers nearest-first.
        if (
            (directory / ".git").is_dir()
            or (directory / ".hg").is_dir()
            or (directory / "pyproject.toml").is_file()
        ):
            return directory

    # Fell off the top of the tree: `directory` is the filesystem root.
    return directory
3543
3544
@dataclass
class Report:
    """Provides a reformatting counter. Can be rendered with `str(report)`."""

    check: bool = False
    quiet: bool = False
    verbose: bool = False
    change_count: int = 0
    same_count: int = 0
    failure_count: int = 0

    def done(self, src: Path, changed: Changed) -> None:
        """Increment the counter for successful reformatting. Write out a message."""
        if changed is Changed.YES:
            reformatted = "would reformat" if self.check else "reformatted"
            if self.verbose or not self.quiet:
                out(f"{reformatted} {src}")
            self.change_count += 1
            return

        if self.verbose:
            if changed is Changed.NO:
                msg = f"{src} already well formatted, good job."
            else:
                msg = f"{src} wasn't modified on disk since last run."
            out(msg, bold=False)
        self.same_count += 1

    def failed(self, src: Path, message: str) -> None:
        """Increment the counter for failed reformatting. Write out a message."""
        err(f"error: cannot format {src}: {message}")
        self.failure_count += 1

    def path_ignored(self, path: Path, message: str) -> None:
        """Log (in verbose mode) that `path` was skipped and why."""
        if self.verbose:
            out(f"{path} ignored: {message}", bold=False)

    @property
    def return_code(self) -> int:
        """Return the exit code that the app should use.

        This considers the current state of changed files and failures:
        - if there were any failures, return 123;
        - if any files were changed and --check is being used, return 1;
        - otherwise return 0.
        """
        # According to http://tldp.org/LDP/abs/html/exitcodes.html starting with
        # 126 we have special return codes reserved by the shell.
        if self.failure_count:
            return 123

        if self.change_count and self.check:
            return 1

        return 0

    def __str__(self) -> str:
        """Render a color report of the current state.

        Use `click.unstyle` to remove colors.
        """
        if self.check:
            reformatted = "would be reformatted"
            unchanged = "would be left unchanged"
            failed = "would fail to reformat"
        else:
            reformatted = "reformatted"
            unchanged = "left unchanged"
            failed = "failed to reformat"

        parts = []
        if self.change_count:
            plural = "s" if self.change_count > 1 else ""
            parts.append(
                click.style(
                    f"{self.change_count} file{plural} {reformatted}", bold=True
                )
            )
        if self.same_count:
            plural = "s" if self.same_count > 1 else ""
            parts.append(f"{self.same_count} file{plural} {unchanged}")
        if self.failure_count:
            plural = "s" if self.failure_count > 1 else ""
            parts.append(
                click.style(f"{self.failure_count} file{plural} {failed}", fg="red")
            )
        return ", ".join(parts) + "."
3628
3629
def parse_ast(src: str) -> Union[ast.AST, ast3.AST, ast27.AST]:
    """Parse `src` with the newest grammar that accepts it.

    On Python 3.8+ the builtin `ast` is tried from the running minor version
    down to 3.5; otherwise typed-ast's Python 3 parser is tried with feature
    versions 3.7 and 3.6. Falls back to the Python 2 parser as a last resort.
    """
    filename = "<unknown>"
    if sys.version_info >= (3, 8):
        # TODO: support Python 4+ ;)
        for minor in range(sys.version_info[1], 4, -1):
            try:
                return ast.parse(src, filename, feature_version=(3, minor))
            except SyntaxError:
                continue
    else:
        for feature_version in (7, 6):
            try:
                return ast3.parse(src, filename, feature_version=feature_version)
            except SyntaxError:
                continue

    return ast27.parse(src)
3647
3648
def _fixup_ast_constants(
    node: Union[ast.AST, ast3.AST, ast27.AST]
) -> Union[ast.AST, ast3.AST, ast27.AST]:
    """Map ast nodes deprecated in 3.8 to Constant."""
    # casts are required until this is released:
    # https://github.com/python/typeshed/pull/3142
    if isinstance(node, (ast.Str, ast3.Str, ast27.Str, ast.Bytes, ast3.Bytes)):
        value = node.s
    elif isinstance(node, (ast.Num, ast3.Num, ast27.Num)):
        value = node.n
    elif isinstance(node, (ast.NameConstant, ast3.NameConstant)):
        value = node.value
    else:
        # Not a deprecated literal node; leave it alone.
        return node

    return cast(ast.AST, ast.Constant(value=value))
3662
3663
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent.

    Equivalence is judged by comparing textual dumps of both ASTs.
    """

    def _v(node: Union[ast.AST, ast3.AST, ast27.AST], depth: int = 0) -> Iterator[str]:
        """Simple visitor generating strings to compare ASTs by content."""

        node = _fixup_ast_constants(node)

        yield f"{' ' * depth}{node.__class__.__name__}("

        for field in sorted(node._fields):
            # TypeIgnore has only one field 'lineno' which breaks this comparison
            type_ignore_classes = (ast3.TypeIgnore, ast27.TypeIgnore)
            if sys.version_info >= (3, 8):
                type_ignore_classes += (ast.TypeIgnore,)
            if isinstance(node, type_ignore_classes):
                break

            try:
                value = getattr(node, field)
            except AttributeError:
                continue

            yield f"{' ' * (depth+1)}{field}="

            if isinstance(value, list):
                for item in value:
                    # Ignore nested tuples within del statements, because we may insert
                    # parentheses and they change the AST.
                    if (
                        field == "targets"
                        and isinstance(node, (ast.Delete, ast3.Delete, ast27.Delete))
                        and isinstance(item, (ast.Tuple, ast3.Tuple, ast27.Tuple))
                    ):
                        for item in item.elts:
                            yield from _v(item, depth + 2)
                    elif isinstance(item, (ast.AST, ast3.AST, ast27.AST)):
                        yield from _v(item, depth + 2)

            elif isinstance(value, (ast.AST, ast3.AST, ast27.AST)):
                yield from _v(value, depth + 2)

            else:
                # Plain values (ints, strings, ...) are rendered via repr.
                yield f"{' ' * (depth+2)}{value!r}, # {value.__class__.__name__}"

        yield f"{' ' * depth}) # /{node.__class__.__name__}"

    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        # The input didn't parse at all: --safe cannot be used on this file.
        raise AssertionError(
            f"cannot use --safe with this file; failed to parse source file. "
            f"AST error message: {exc}"
        )

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            f"Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    # Compare the two rendered AST dumps line by line.
    src_ast_str = "\n".join(_v(src_ast))
    dst_ast_str = "\n".join(_v(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            f"INTERNAL ERROR: Black produced code that is not equivalent to "
            f"the source. "
            f"Please report a bug on https://github.com/psf/black/issues. "
            f"This diff might be helpful: {log}"
        ) from None
3739
3740
def assert_stable(src: str, dst: str, mode: FileMode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    second_pass = format_str(dst, mode=mode)
    if dst == second_pass:
        # Idempotent: formatting the output again changed nothing.
        return

    # Persist both diffs (source -> first pass, first -> second pass) so the
    # bug report has something actionable.
    log = dump_to_file(
        diff(src, dst, "source", "first pass"),
        diff(dst, second_pass, "first pass", "second pass"),
    )
    raise AssertionError(
        f"INTERNAL ERROR: Black produced different code on the second pass "
        f"of the formatter. "
        f"Please report a bug on https://github.com/psf/black/issues. "
        f"This diff might be helpful: {log}"
    ) from None
3755
3756
def dump_to_file(*output: str) -> str:
    """Dump `output` to a temporary file. Return path to the file."""
    with tempfile.NamedTemporaryFile(
        mode="w", prefix="blk_", suffix=".log", delete=False, encoding="utf8"
    ) as f:
        for chunk in output:
            f.write(chunk)
            # Guarantee each chunk ends on its own line.
            if chunk and not chunk.endswith("\n"):
                f.write("\n")
    return f.name
3767
3768
@contextmanager
def nullcontext() -> Iterator[None]:
    """A context manager that does nothing and yields None.

    Stand-in for `contextlib.nullcontext`, which only exists on Python 3.7+.
    """
    yield None
3774
3775
def diff(a: str, b: str, a_name: str, b_name: str) -> str:
    """Return a unified diff string between strings `a` and `b`."""
    import difflib

    def as_lines(text):
        # Re-attach newlines so difflib treats each piece as a full line.
        return [piece + "\n" for piece in text.split("\n")]

    delta = difflib.unified_diff(
        as_lines(a), as_lines(b), fromfile=a_name, tofile=b_name, n=5
    )
    return "".join(delta)
3785
3786
def cancel(tasks: Iterable[asyncio.Task]) -> None:
    """asyncio signal handler that cancels all `tasks` and reports to stderr."""
    # Report first so the user sees the abort message even if cancellation
    # itself takes a while.
    err("Aborted!")
    for pending_task in tasks:
        pending_task.cancel()
3792
3793
def shutdown(loop: asyncio.AbstractEventLoop) -> None:
    """Cancel all pending tasks on `loop`, wait for them, and close the loop."""
    try:
        # asyncio.Task.all_tasks() was promoted to the module-level
        # asyncio.all_tasks() in Python 3.7.
        if sys.version_info[:2] >= (3, 7):
            collect_tasks = asyncio.all_tasks
        else:
            collect_tasks = asyncio.Task.all_tasks
        # This part is borrowed from asyncio/runners.py in Python 3.7b2.
        pending = [task for task in collect_tasks(loop) if not task.done()]
        if not pending:
            return

        for task in pending:
            task.cancel()
        loop.run_until_complete(
            asyncio.gather(*pending, loop=loop, return_exceptions=True)
        )
    finally:
        # `concurrent.futures.Future` objects cannot be cancelled once they
        # are already running. There might be some when the `shutdown()` happened.
        # Silence their logger's spew about the event loop being closed.
        cf_logger = logging.getLogger("concurrent.futures")
        cf_logger.setLevel(logging.CRITICAL)
        loop.close()
3818
3819
def sub_twice(regex: Pattern[str], replacement: str, original: str) -> str:
    """Replace `regex` with `replacement` twice on `original`.

    This is used by string normalization to perform replaces on
    overlapping matches.
    """
    first_pass = regex.sub(replacement, original)
    return regex.sub(replacement, first_pass)
3827
3828
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    # A multi-line pattern is assumed to be written for verbose mode.
    prefix = "(?x)" if "\n" in regex else ""
    return re.compile(prefix + regex)
3837
3838
def enumerate_reversed(sequence: Sequence[T]) -> Iterator[Tuple[Index, T]]:
    """Like `reversed(enumerate(sequence))` if that were possible."""
    top_index = len(sequence) - 1
    for offset, element in enumerate(reversed(sequence)):
        yield top_index - offset, element
3845
3846
def enumerate_with_length(
    line: Line, reversed: bool = False
) -> Iterator[Tuple[Index, Leaf, int]]:
    """Return an enumeration of leaves with their length.

    Stops prematurely on multiline strings and standalone comments.
    """
    # NOTE: `reversed` here is the boolean parameter, shadowing the builtin.
    enumerator = cast(
        Callable[[Sequence[Leaf]], Iterator[Tuple[Index, Leaf]]],
        enumerate_reversed if reversed else enumerate,
    )
    for index, leaf in enumerator(line.leaves):
        if "\n" in leaf.value:
            return  # Multiline strings, we can't continue.

        length = len(leaf.prefix) + len(leaf.value)
        # Trailing comments count against the leaf's effective width.
        length += sum(len(comment.value) for comment in line.comments_after(leaf))
        yield index, leaf, length
3867
3868
def is_line_short_enough(line: Line, *, line_length: int, line_str: str = "") -> bool:
    """Return True if `line` is no longer than `line_length`.

    Uses the provided `line_str` rendering, if any, otherwise computes a new one.
    """
    rendered = line_str or str(line).strip("\n")
    if len(rendered) > line_length:
        return False
    if "\n" in rendered:  # multiline strings
        return False
    return not line.contains_standalone_comments()
3881
3882
def can_be_split(line: Line) -> bool:
    """Return False if the line cannot be split *for sure*.

    This is not an exhaustive search but a cheap heuristic that we can use to
    avoid some unfortunate formattings (mostly around wrapping unsplittable code
    in unnecessary parentheses).
    """
    leaves = line.leaves
    if len(leaves) < 2:
        return False

    if leaves[0].type == token.STRING and leaves[1].type == token.DOT:
        # A string followed by attribute access: scan the rest of the line
        # right-to-left and only allow shapes that look like a plain
        # attribute/call chain on the string.
        call_count = 0
        dot_count = 0
        # NOTE(review): `next` (meant to be the leaf to the right of `leaf`)
        # is assigned once and never advanced inside the loop, so every
        # comparison below is against the *last* leaf of the line -- confirm
        # against upstream black whether a trailing `next = leaf` was dropped.
        next = leaves[-1]
        for leaf in leaves[-2::-1]:
            if leaf.type in OPENING_BRACKETS:
                if next.type not in CLOSING_BRACKETS:
                    return False

                call_count += 1
            elif leaf.type == token.DOT:
                dot_count += 1
            elif leaf.type == token.NAME:
                if not (next.type == token.DOT or next.type in OPENING_BRACKETS):
                    return False

            elif leaf.type not in CLOSING_BRACKETS:
                return False

            if dot_count > 1 and call_count > 1:
                # Long chains with several calls: splitting is not safe here.
                return False

    return True
3917
3918
def can_omit_invisible_parens(line: Line, line_length: int) -> bool:
    """Does `line` have a shape safe to reformat without optional parens around it?

    Returns True for only a subset of potentially nice looking formattings but
    the point is to not return false positives that end up producing lines that
    are too long.
    """
    bt = line.bracket_tracker
    if not bt.delimiters:
        # Without delimiters the optional parentheses are useless.
        return True

    max_priority = bt.max_delimiter_priority()
    if bt.delimiter_count_with_priority(max_priority) > 1:
        # With more than one delimiter of a kind the optional parentheses read better.
        return False

    if max_priority == DOT_PRIORITY:
        # A single stranded method call doesn't require optional parentheses.
        return True

    assert len(line.leaves) >= 2, "Stranded delimiter"

    first = line.leaves[0]
    second = line.leaves[1]
    penultimate = line.leaves[-2]
    last = line.leaves[-1]

    # With a single delimiter, omit if the expression starts or ends with
    # a bracket.
    if first.type in OPENING_BRACKETS and second.type not in CLOSING_BRACKETS:
        # `remainder` flips on once we pass the closing bracket that matches
        # `first`; from there we accumulate the width of the trailing part
        # that would have to fit on this line.
        remainder = False
        length = 4 * line.depth  # leading indentation, 4 columns per level
        for _index, leaf, leaf_length in enumerate_with_length(line):
            if leaf.type in CLOSING_BRACKETS and leaf.opening_bracket is first:
                remainder = True
            if remainder:
                length += leaf_length
                if length > line_length:
                    break

                if leaf.type in OPENING_BRACKETS:
                    # There are brackets we can further split on.
                    remainder = False

        else:
            # checked the entire string and line length wasn't exceeded
            if len(line.leaves) == _index + 1:
                return True

        # Note: we are not returning False here because a line might have *both*
        # a leading opening bracket and a trailing closing bracket. If the
        # opening bracket doesn't match our rule, maybe the closing will.

    if (
        last.type == token.RPAR
        or last.type == token.RBRACE
        or (
            # don't use indexing for omitting optional parentheses;
            # it looks weird
            last.type == token.RSQB
            and last.parent
            and last.parent.type != syms.trailer
        )
    ):
        if penultimate.type in OPENING_BRACKETS:
            # Empty brackets don't help.
            return False

        if is_multiline_string(first):
            # Additional wrapping of a multiline string in this situation is
            # unnecessary.
            return True

        # Accumulate width up to the bracket that opens the trailing group;
        # if everything before it fits, or there are other brackets we could
        # split on instead, omitting the invisible parens is safe.
        length = 4 * line.depth  # leading indentation, 4 columns per level
        seen_other_brackets = False
        for _index, leaf, leaf_length in enumerate_with_length(line):
            length += leaf_length
            if leaf is last.opening_bracket:
                if seen_other_brackets or length <= line_length:
                    return True

            elif leaf.type in OPENING_BRACKETS:
                # There are brackets we can further split on.
                seen_other_brackets = True

    return False
4006
4007
def get_cache_file(mode: FileMode) -> Path:
    """Return the path of the on-disk cache file for `mode`."""
    cache_key = mode.get_cache_key()
    return CACHE_DIR / f"cache.{cache_key}.pickle"
4010
4011
def read_cache(mode: FileMode) -> Cache:
    """Read the cache if it exists and is well formed.

    If it is not well formed, the call to write_cache later should resolve the issue.

    Returns an empty cache when the file is missing or unreadable.
    """
    cache_file = get_cache_file(mode)
    if not cache_file.exists():
        return {}

    with cache_file.open("rb") as fobj:
        try:
            cache: Cache = pickle.load(fobj)
        except (pickle.UnpicklingError, ValueError, EOFError):
            # A truncated or otherwise corrupt cache file can raise more than
            # just UnpicklingError (EOFError on truncation, ValueError on an
            # unsupported pickle protocol).  Treat all of these as "no cache"
            # so formatting proceeds and write_cache regenerates the file.
            return {}

    return cache
4028
4029
def get_cache_info(path: Path) -> CacheInfo:
    """Return the information used to check if a file is already formatted or not."""
    st = path.stat()
    # (mtime, size) is a cheap fingerprint; content is not hashed.
    return st.st_mtime, st.st_size
4034
4035
def filter_cached(cache: Cache, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]:
    """Split an iterable of paths in `sources` into two sets.

    The first contains paths of files that modified on disk or are not in the
    cache. The other contains paths to non-modified files.
    """
    todo: Set[Path] = set()
    done: Set[Path] = set()
    for source in sources:
        resolved = source.resolve()
        # Unchanged means the cached (mtime, size) fingerprint still matches.
        if cache.get(resolved) == get_cache_info(resolved):
            done.add(resolved)
        else:
            todo.add(resolved)
    return todo, done
4050
4051
def write_cache(cache: Cache, sources: Iterable[Path], mode: FileMode) -> None:
    """Update the cache file."""
    cache_file = get_cache_file(mode)
    try:
        CACHE_DIR.mkdir(parents=True, exist_ok=True)
        fresh_entries = {src.resolve(): get_cache_info(src) for src in sources}
        new_cache = {**cache, **fresh_entries}
        # Write to a sibling temp file, then atomically swap it into place.
        with tempfile.NamedTemporaryFile(dir=str(cache_file.parent), delete=False) as f:
            pickle.dump(new_cache, f, protocol=pickle.HIGHEST_PROTOCOL)
        os.replace(f.name, cache_file)
    except OSError:
        # Caching is best-effort; failure to write must not break formatting.
        pass
4063
4064
def patch_click() -> None:
    """Make Click not crash.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    try:
        from click import core
        from click import _unicodefun  # type: ignore
    except ModuleNotFoundError:
        # Click isn't installed; nothing to patch.
        return

    # Neutralize the locale check wherever Click defines it.
    for click_module in (core, _unicodefun):
        if hasattr(click_module, "_verify_python3_env"):
            click_module._verify_python3_env = lambda: None
4085
4086
def patched_main() -> None:
    """CLI entry point with environment workarounds applied first.

    Calls `freeze_support()` (presumably multiprocessing.freeze_support,
    needed for frozen executables -- confirm the import at the top of the
    file) and `patch_click()` before delegating to `main()`.
    """
    freeze_support()
    patch_click()
    main()
4091
4092
# Script entry point: apply workarounds, then run the CLI.
if __name__ == "__main__":
    patched_main()
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -113,7 +113,7 b' check: tests'
113 tests:
113 tests:
114 # Run Rust tests if cargo is installed
114 # Run Rust tests if cargo is installed
115 if command -v $(CARGO) >/dev/null 2>&1; then \
115 if command -v $(CARGO) >/dev/null 2>&1; then \
116 cd $(HGROOT)/rust/hg-cpython && $(CARGO) test --quiet --all; \
116 $(MAKE) rust-tests; \
117 fi
117 fi
118 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
118 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
119
119
@@ -127,6 +127,13 b' testpy-%:'
127 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
127 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
128 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
128 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
129
129
130 rust-tests: py_feature = $(shell $(PYTHON) -c \
131 'import sys; print(["python27-bin", "python3-bin"][sys.version_info[0] >= 3])')
132 rust-tests:
133 cd $(HGROOT)/rust/hg-cpython \
134 && $(CARGO) test --quiet --all \
135 --no-default-features --features "$(py_feature)"
136
130 check-code:
137 check-code:
131 hg manifest | xargs python contrib/check-code.py
138 hg manifest | xargs python contrib/check-code.py
132
139
@@ -248,6 +255,7 b' osx:'
248
255
249 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
256 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
250 install-doc install-home install-home-bin install-home-doc \
257 install-doc install-home install-home-bin install-home-doc \
251 dist dist-notests check tests check-code format-c update-pot \
258 dist dist-notests check tests rust-tests check-code format-c \
259 update-pot \
252 $(packaging_targets) \
260 $(packaging_targets) \
253 osx
261 osx
@@ -33,6 +33,46 b" side-effects so they don't impact the lo"
33 into a remote machine, we create a temporary directory for the SSH
33 into a remote machine, we create a temporary directory for the SSH
34 config so the user's known hosts file isn't updated.
34 config so the user's known hosts file isn't updated.
35
35
36 Try Server
37 ==========
38
39 There exists a *Try Server* which allows automation to run against
40 an arbitrary Mercurial changeset and displays results via the web.
41
42 .. note::
43
44 The *Try Server* is still experimental infrastructure.
45
46 To use the *Try Server*::
47
48 $ ./automation.py try
49
50 With a custom AWS profile::
51
52 $ AWS_PROFILE=hg contrib/automation/automation.py try
53
54 By default, the ``.`` revision is submitted. **Any uncommitted changes
55 are not submitted.**
56
57 To switch which revision is used::
58
59 $ ./automation.py try -r abcdef
60
61 Access to the *Try Server* requires access to a special AWS account.
62 This account is currently run by Gregory Szorc. Here is the procedure
63 for accessing the *Try Server*:
64
65 1. Email Gregory Szorc at gregory.szorc@gmail.com and request a
66 username. This username will be stored in the public domain.
67 2. Wait for an email reply containing your temporary AWS credentials.
68 3. Log in at https://gregoryszorc-hg.signin.aws.amazon.com/console
69 and set a new, secure password.
70 4. Go to https://console.aws.amazon.com/iam/home?region=us-west-2#/security_credentials
71 5. Under ``Access keys for CLI, SDK, & API access``, click the
72 ``Create access key`` button.
73 6. See the ``AWS Integration`` section for instructions on
74 configuring your local client to use the generated credentials.
75
36 AWS Integration
76 AWS Integration
37 ===============
77 ===============
38
78
@@ -47,12 +87,25 b' https://boto3.amazonaws.com/v1/documenta'
47 for how ``boto3`` works. Once you have configured your environment such
87 for how ``boto3`` works. Once you have configured your environment such
48 that ``boto3`` can find credentials, interaction with AWS should *just work*.
88 that ``boto3`` can find credentials, interaction with AWS should *just work*.
49
89
50 .. hint::
90 To configure ``boto3``, you can use the ``aws configure`` command to
91 write out configuration files. (The ``aws`` command is typically provided
92 by an ``awscli`` package available in your package manager, including
93 ``pip``.) Alternatively, you can write out files in ``~/.aws/`` directly.
94 e.g.::
95
96 # ~/.aws/config
97 [default]
98 region = us-west-2
51
99
52 Typically you have a ``~/.aws/credentials`` file containing AWS
100 # ~/.aws/credentials
53 credentials. If you manage multiple credentials, you can override which
101 [default]
54 *profile* to use at run-time by setting the ``AWS_PROFILE`` environment
102 aws_access_key_id = XXXX
55 variable.
103 aws_secret_access_key = YYYY
104
105 If you have multiple AWS accounts, you can name the profile something
106 different from ``default``. e.g. ``hg``. You can influence which profile
107 is used by ``boto3`` by setting the ``AWS_PROFILE`` environment variable.
108 e.g. ``AWS_PROFILE=hg``.
56
109
57 Resource Management
110 Resource Management
58 -------------------
111 -------------------
@@ -181,3 +234,25 b' Various dependencies to run the Mercuria'
181 Documenting them is beyond the scope of this document. Various tests
234 Documenting them is beyond the scope of this document. Various tests
182 also require other optional dependencies and missing dependencies will
235 also require other optional dependencies and missing dependencies will
183 be printed by the test runner when a test is skipped.
236 be printed by the test runner when a test is skipped.
237
238 Releasing Windows Artifacts
239 ===========================
240
241 The `automation.py` script can be used to automate the release of Windows
242 artifacts::
243
244 $ ./automation.py build-all-windows-packages --revision 5.1.1
245 $ ./automation.py publish-windows-artifacts 5.1.1
246
247 The first command will launch an EC2 instance to build all Windows packages
248 and copy them into the `dist` directory relative to the repository root. The
249 second command will then attempt to upload these files to PyPI (via `twine`)
250 and to `mercurial-scm.org` (via SSH).
251
252 Uploading to PyPI requires a PyPI account with write access to the `Mercurial`
253 package. You can skip PyPI uploading by passing `--no-pypi`.
254
255 Uploading to `mercurial-scm.org` requires an SSH account on that server
256 with `windows` group membership and for the SSH key for that account to be the
257 default SSH key (e.g. `~/.ssh/id_rsa`) or in a running SSH agent. You can
258 skip `mercurial-scm.org` uploading by passing `--no-mercurial-scm-org`.
@@ -36,8 +36,13 b' def bootstrap():'
36 pip = venv_bin / 'pip'
36 pip = venv_bin / 'pip'
37 python = venv_bin / 'python'
37 python = venv_bin / 'python'
38
38
39 args = [str(pip), 'install', '-r', str(REQUIREMENTS_TXT),
39 args = [
40 '--disable-pip-version-check']
40 str(pip),
41 'install',
42 '-r',
43 str(REQUIREMENTS_TXT),
44 '--disable-pip-version-check',
45 ]
41
46
42 if not venv_created:
47 if not venv_created:
43 args.append('-q')
48 args.append('-q')
@@ -45,8 +50,7 b' def bootstrap():'
45 subprocess.run(args, check=True)
50 subprocess.run(args, check=True)
46
51
47 os.environ['HGAUTOMATION_BOOTSTRAPPED'] = '1'
52 os.environ['HGAUTOMATION_BOOTSTRAPPED'] = '1'
48 os.environ['PATH'] = '%s%s%s' % (
53 os.environ['PATH'] = '%s%s%s' % (venv_bin, os.pathsep, os.environ['PATH'])
49 venv_bin, os.pathsep, os.environ['PATH'])
50
54
51 subprocess.run([str(python), __file__] + sys.argv[1:], check=True)
55 subprocess.run([str(python), __file__] + sys.argv[1:], check=True)
52
56
@@ -10,9 +10,7 b''
10 import pathlib
10 import pathlib
11 import secrets
11 import secrets
12
12
13 from .aws import (
13 from .aws import AWSConnection
14 AWSConnection,
15 )
16
14
17
15
18 class HGAutomation:
16 class HGAutomation:
@@ -53,7 +51,7 b' class HGAutomation:'
53
51
54 return password
52 return password
55
53
56 def aws_connection(self, region: str, ensure_ec2_state: bool=True):
54 def aws_connection(self, region: str, ensure_ec2_state: bool = True):
57 """Obtain an AWSConnection instance bound to a specific region."""
55 """Obtain an AWSConnection instance bound to a specific region."""
58
56
59 return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
57 return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
@@ -19,9 +19,7 b' import time'
19 import boto3
19 import boto3
20 import botocore.exceptions
20 import botocore.exceptions
21
21
22 from .linux import (
22 from .linux import BOOTSTRAP_DEBIAN
23 BOOTSTRAP_DEBIAN,
24 )
25 from .ssh import (
23 from .ssh import (
26 exec_command as ssh_exec_command,
24 exec_command as ssh_exec_command,
27 wait_for_ssh,
25 wait_for_ssh,
@@ -32,10 +30,13 b' from .winrm import ('
32 )
30 )
33
31
34
32
35 SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent
33 SOURCE_ROOT = pathlib.Path(
34 os.path.abspath(__file__)
35 ).parent.parent.parent.parent
36
36
37 INSTALL_WINDOWS_DEPENDENCIES = (SOURCE_ROOT / 'contrib' /
37 INSTALL_WINDOWS_DEPENDENCIES = (
38 'install-windows-dependencies.ps1')
38 SOURCE_ROOT / 'contrib' / 'install-windows-dependencies.ps1'
39 )
39
40
40
41
41 INSTANCE_TYPES_WITH_STORAGE = {
42 INSTANCE_TYPES_WITH_STORAGE = {
@@ -54,6 +55,7 b' INSTANCE_TYPES_WITH_STORAGE = {'
54
55
55 AMAZON_ACCOUNT_ID = '801119661308'
56 AMAZON_ACCOUNT_ID = '801119661308'
56 DEBIAN_ACCOUNT_ID = '379101102735'
57 DEBIAN_ACCOUNT_ID = '379101102735'
58 DEBIAN_ACCOUNT_ID_2 = '136693071363'
57 UBUNTU_ACCOUNT_ID = '099720109477'
59 UBUNTU_ACCOUNT_ID = '099720109477'
58
60
59
61
@@ -106,7 +108,6 b' SECURITY_GROUPS = {'
106 'Description': 'RDP from entire Internet',
108 'Description': 'RDP from entire Internet',
107 },
109 },
108 ],
110 ],
109
110 },
111 },
111 {
112 {
112 'FromPort': 5985,
113 'FromPort': 5985,
@@ -118,7 +119,7 b' SECURITY_GROUPS = {'
118 'Description': 'PowerShell Remoting (Windows Remote Management)',
119 'Description': 'PowerShell Remoting (Windows Remote Management)',
119 },
120 },
120 ],
121 ],
121 }
122 },
122 ],
123 ],
123 },
124 },
124 }
125 }
@@ -151,11 +152,7 b" ASSUME_ROLE_POLICY_DOCUMENT = '''"
151
152
152
153
153 IAM_INSTANCE_PROFILES = {
154 IAM_INSTANCE_PROFILES = {
154 'ephemeral-ec2-1': {
155 'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
155 'roles': [
156 'ephemeral-ec2-role-1',
157 ],
158 }
159 }
156 }
160
157
161
158
@@ -225,7 +222,7 b' Install-WindowsFeature -Name Net-Framewo'
225 class AWSConnection:
222 class AWSConnection:
226 """Manages the state of a connection with AWS."""
223 """Manages the state of a connection with AWS."""
227
224
228 def __init__(self, automation, region: str, ensure_ec2_state: bool=True):
225 def __init__(self, automation, region: str, ensure_ec2_state: bool = True):
229 self.automation = automation
226 self.automation = automation
230 self.local_state_path = automation.state_path
227 self.local_state_path = automation.state_path
231
228
@@ -256,10 +253,19 b' def rsa_key_fingerprint(p: pathlib.Path)'
256
253
257 # TODO use rsa package.
254 # TODO use rsa package.
258 res = subprocess.run(
255 res = subprocess.run(
259 ['openssl', 'pkcs8', '-in', str(p), '-nocrypt', '-topk8',
256 [
260 '-outform', 'DER'],
257 'openssl',
258 'pkcs8',
259 '-in',
260 str(p),
261 '-nocrypt',
262 '-topk8',
263 '-outform',
264 'DER',
265 ],
261 capture_output=True,
266 capture_output=True,
262 check=True)
267 check=True,
268 )
263
269
264 sha1 = hashlib.sha1(res.stdout).hexdigest()
270 sha1 = hashlib.sha1(res.stdout).hexdigest()
265 return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2]))
271 return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2]))
@@ -270,7 +276,7 b' def ensure_key_pairs(state_path: pathlib'
270
276
271 for kpi in ec2resource.key_pairs.all():
277 for kpi in ec2resource.key_pairs.all():
272 if kpi.name.startswith(prefix):
278 if kpi.name.startswith(prefix):
273 remote_existing[kpi.name[len(prefix):]] = kpi.key_fingerprint
279 remote_existing[kpi.name[len(prefix) :]] = kpi.key_fingerprint
274
280
275 # Validate that we have these keys locally.
281 # Validate that we have these keys locally.
276 key_path = state_path / 'keys'
282 key_path = state_path / 'keys'
@@ -296,7 +302,7 b' def ensure_key_pairs(state_path: pathlib'
296 if not f.startswith('keypair-') or not f.endswith('.pub'):
302 if not f.startswith('keypair-') or not f.endswith('.pub'):
297 continue
303 continue
298
304
299 name = f[len('keypair-'):-len('.pub')]
305 name = f[len('keypair-') : -len('.pub')]
300
306
301 pub_full = key_path / f
307 pub_full = key_path / f
302 priv_full = key_path / ('keypair-%s' % name)
308 priv_full = key_path / ('keypair-%s' % name)
@@ -305,8 +311,9 b' def ensure_key_pairs(state_path: pathlib'
305 data = fh.read()
311 data = fh.read()
306
312
307 if not data.startswith('ssh-rsa '):
313 if not data.startswith('ssh-rsa '):
308 print('unexpected format for key pair file: %s; removing' %
314 print(
309 pub_full)
315 'unexpected format for key pair file: %s; removing' % pub_full
316 )
310 pub_full.unlink()
317 pub_full.unlink()
311 priv_full.unlink()
318 priv_full.unlink()
312 continue
319 continue
@@ -326,8 +333,10 b' def ensure_key_pairs(state_path: pathlib'
326 del local_existing[name]
333 del local_existing[name]
327
334
328 elif remote_existing[name] != local_existing[name]:
335 elif remote_existing[name] != local_existing[name]:
329 print('key fingerprint mismatch for %s; '
336 print(
330 'removing from local and remote' % name)
337 'key fingerprint mismatch for %s; '
338 'removing from local and remote' % name
339 )
331 remove_local(name)
340 remove_local(name)
332 remove_remote('%s%s' % (prefix, name))
341 remove_remote('%s%s' % (prefix, name))
333 del local_existing[name]
342 del local_existing[name]
@@ -355,15 +364,18 b' def ensure_key_pairs(state_path: pathlib'
355 subprocess.run(
364 subprocess.run(
356 ['ssh-keygen', '-y', '-f', str(priv_full)],
365 ['ssh-keygen', '-y', '-f', str(priv_full)],
357 stdout=fh,
366 stdout=fh,
358 check=True)
367 check=True,
368 )
359
369
360 pub_full.chmod(0o0600)
370 pub_full.chmod(0o0600)
361
371
362
372
363 def delete_instance_profile(profile):
373 def delete_instance_profile(profile):
364 for role in profile.roles:
374 for role in profile.roles:
365 print('removing role %s from instance profile %s' % (role.name,
375 print(
366 profile.name))
376 'removing role %s from instance profile %s'
377 % (role.name, profile.name)
378 )
367 profile.remove_role(RoleName=role.name)
379 profile.remove_role(RoleName=role.name)
368
380
369 print('deleting instance profile %s' % profile.name)
381 print('deleting instance profile %s' % profile.name)
@@ -377,7 +389,7 b' def ensure_iam_state(iamclient, iamresou'
377
389
378 for profile in iamresource.instance_profiles.all():
390 for profile in iamresource.instance_profiles.all():
379 if profile.name.startswith(prefix):
391 if profile.name.startswith(prefix):
380 remote_profiles[profile.name[len(prefix):]] = profile
392 remote_profiles[profile.name[len(prefix) :]] = profile
381
393
382 for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)):
394 for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)):
383 delete_instance_profile(remote_profiles[name])
395 delete_instance_profile(remote_profiles[name])
@@ -387,7 +399,7 b' def ensure_iam_state(iamclient, iamresou'
387
399
388 for role in iamresource.roles.all():
400 for role in iamresource.roles.all():
389 if role.name.startswith(prefix):
401 if role.name.startswith(prefix):
390 remote_roles[role.name[len(prefix):]] = role
402 remote_roles[role.name[len(prefix) :]] = role
391
403
392 for name in sorted(set(remote_roles) - set(IAM_ROLES)):
404 for name in sorted(set(remote_roles) - set(IAM_ROLES)):
393 role = remote_roles[name]
405 role = remote_roles[name]
@@ -403,7 +415,8 b' def ensure_iam_state(iamclient, iamresou'
403 print('creating IAM instance profile %s' % actual)
415 print('creating IAM instance profile %s' % actual)
404
416
405 profile = iamresource.create_instance_profile(
417 profile = iamresource.create_instance_profile(
406 InstanceProfileName=actual)
418 InstanceProfileName=actual
419 )
407 remote_profiles[name] = profile
420 remote_profiles[name] = profile
408
421
409 waiter = iamclient.get_waiter('instance_profile_exists')
422 waiter = iamclient.get_waiter('instance_profile_exists')
@@ -452,23 +465,12 b' def find_image(ec2resource, owner_id, na'
452
465
453 images = ec2resource.images.filter(
466 images = ec2resource.images.filter(
454 Filters=[
467 Filters=[
455 {
468 {'Name': 'owner-id', 'Values': [owner_id],},
456 'Name': 'owner-id',
469 {'Name': 'state', 'Values': ['available'],},
457 'Values': [owner_id],
470 {'Name': 'image-type', 'Values': ['machine'],},
458 },
471 {'Name': 'name', 'Values': [name],},
459 {
472 ]
460 'Name': 'state',
473 )
461 'Values': ['available'],
462 },
463 {
464 'Name': 'image-type',
465 'Values': ['machine'],
466 },
467 {
468 'Name': 'name',
469 'Values': [name],
470 },
471 ])
472
474
473 for image in images:
475 for image in images:
474 return image
476 return image
@@ -486,7 +488,7 b' def ensure_security_groups(ec2resource, '
486
488
487 for group in ec2resource.security_groups.all():
489 for group in ec2resource.security_groups.all():
488 if group.group_name.startswith(prefix):
490 if group.group_name.startswith(prefix):
489 existing[group.group_name[len(prefix):]] = group
491 existing[group.group_name[len(prefix) :]] = group
490
492
491 purge = set(existing) - set(SECURITY_GROUPS)
493 purge = set(existing) - set(SECURITY_GROUPS)
492
494
@@ -506,13 +508,10 b' def ensure_security_groups(ec2resource, '
506 print('adding security group %s' % actual)
508 print('adding security group %s' % actual)
507
509
508 group_res = ec2resource.create_security_group(
510 group_res = ec2resource.create_security_group(
509 Description=group['description'],
511 Description=group['description'], GroupName=actual,
510 GroupName=actual,
511 )
512 )
512
513
513 group_res.authorize_ingress(
514 group_res.authorize_ingress(IpPermissions=group['ingress'],)
514 IpPermissions=group['ingress'],
515 )
516
515
517 security_groups[name] = group_res
516 security_groups[name] = group_res
518
517
@@ -576,8 +575,10 b' def wait_for_ip_addresses(instances):'
576 instance.reload()
575 instance.reload()
577 continue
576 continue
578
577
579 print('public IP address for %s: %s' % (
578 print(
580 instance.id, instance.public_ip_address))
579 'public IP address for %s: %s'
580 % (instance.id, instance.public_ip_address)
581 )
581 break
582 break
582
583
583
584
@@ -602,10 +603,7 b' def wait_for_ssm(ssmclient, instances):'
602 while True:
603 while True:
603 res = ssmclient.describe_instance_information(
604 res = ssmclient.describe_instance_information(
604 Filters=[
605 Filters=[
605 {
606 {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
606 'Key': 'InstanceIds',
607 'Values': [i.id for i in instances],
608 },
609 ],
607 ],
610 )
608 )
611
609
@@ -627,9 +625,7 b' def run_ssm_command(ssmclient, instances'
627 InstanceIds=[i.id for i in instances],
625 InstanceIds=[i.id for i in instances],
628 DocumentName=document_name,
626 DocumentName=document_name,
629 Parameters=parameters,
627 Parameters=parameters,
630 CloudWatchOutputConfig={
628 CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
631 'CloudWatchOutputEnabled': True,
632 },
633 )
629 )
634
630
635 command_id = res['Command']['CommandId']
631 command_id = res['Command']['CommandId']
@@ -638,8 +634,7 b' def run_ssm_command(ssmclient, instances'
638 while True:
634 while True:
639 try:
635 try:
640 res = ssmclient.get_command_invocation(
636 res = ssmclient.get_command_invocation(
641 CommandId=command_id,
637 CommandId=command_id, InstanceId=instance.id,
642 InstanceId=instance.id,
643 )
638 )
644 except botocore.exceptions.ClientError as e:
639 except botocore.exceptions.ClientError as e:
645 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
640 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
@@ -654,8 +649,9 b' def run_ssm_command(ssmclient, instances'
654 elif res['Status'] in ('Pending', 'InProgress', 'Delayed'):
649 elif res['Status'] in ('Pending', 'InProgress', 'Delayed'):
655 time.sleep(2)
650 time.sleep(2)
656 else:
651 else:
657 raise Exception('command failed on %s: %s' % (
652 raise Exception(
658 instance.id, res['Status']))
653 'command failed on %s: %s' % (instance.id, res['Status'])
654 )
659
655
660
656
661 @contextlib.contextmanager
657 @contextlib.contextmanager
@@ -691,7 +687,9 b' def temporary_ec2_instances(ec2resource,'
691
687
692
688
693 @contextlib.contextmanager
689 @contextlib.contextmanager
694 def create_temp_windows_ec2_instances(c: AWSConnection, config):
690 def create_temp_windows_ec2_instances(
691 c: AWSConnection, config, bootstrap: bool = False
692 ):
695 """Create temporary Windows EC2 instances.
693 """Create temporary Windows EC2 instances.
696
694
697 This is a higher-level wrapper around ``create_temp_ec2_instances()`` that
695 This is a higher-level wrapper around ``create_temp_ec2_instances()`` that
@@ -710,11 +708,15 b' def create_temp_windows_ec2_instances(c:'
710 config['IamInstanceProfile'] = {
708 config['IamInstanceProfile'] = {
711 'Name': 'hg-ephemeral-ec2-1',
709 'Name': 'hg-ephemeral-ec2-1',
712 }
710 }
713 config.setdefault('TagSpecifications', []).append({
711 config.setdefault('TagSpecifications', []).append(
714 'ResourceType': 'instance',
712 {
715 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}],
713 'ResourceType': 'instance',
716 })
714 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}],
717 config['UserData'] = WINDOWS_USER_DATA % password
715 }
716 )
717
718 if bootstrap:
719 config['UserData'] = WINDOWS_USER_DATA % password
718
720
719 with temporary_ec2_instances(c.ec2resource, config) as instances:
721 with temporary_ec2_instances(c.ec2resource, config) as instances:
720 wait_for_ip_addresses(instances)
722 wait_for_ip_addresses(instances)
@@ -722,7 +724,9 b' def create_temp_windows_ec2_instances(c:'
722 print('waiting for Windows Remote Management service...')
724 print('waiting for Windows Remote Management service...')
723
725
724 for instance in instances:
726 for instance in instances:
725 client = wait_for_winrm(instance.public_ip_address, 'Administrator', password)
727 client = wait_for_winrm(
728 instance.public_ip_address, 'Administrator', password
729 )
726 print('established WinRM connection to %s' % instance.id)
730 print('established WinRM connection to %s' % instance.id)
727 instance.winrm_client = client
731 instance.winrm_client = client
728
732
@@ -747,14 +751,17 b' def find_and_reconcile_image(ec2resource'
747 # Store a reference to a good image so it can be returned one the
751 # Store a reference to a good image so it can be returned one the
748 # image state is reconciled.
752 # image state is reconciled.
749 images = ec2resource.images.filter(
753 images = ec2resource.images.filter(
750 Filters=[{'Name': 'name', 'Values': [name]}])
754 Filters=[{'Name': 'name', 'Values': [name]}]
755 )
751
756
752 existing_image = None
757 existing_image = None
753
758
754 for image in images:
759 for image in images:
755 if image.tags is None:
760 if image.tags is None:
756 print('image %s for %s lacks required tags; removing' % (
761 print(
757 image.id, image.name))
762 'image %s for %s lacks required tags; removing'
763 % (image.id, image.name)
764 )
758 remove_ami(ec2resource, image)
765 remove_ami(ec2resource, image)
759 else:
766 else:
760 tags = {t['Key']: t['Value'] for t in image.tags}
767 tags = {t['Key']: t['Value'] for t in image.tags}
@@ -762,15 +769,18 b' def find_and_reconcile_image(ec2resource'
762 if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
769 if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
763 existing_image = image
770 existing_image = image
764 else:
771 else:
765 print('image %s for %s has wrong fingerprint; removing' % (
772 print(
766 image.id, image.name))
773 'image %s for %s has wrong fingerprint; removing'
774 % (image.id, image.name)
775 )
767 remove_ami(ec2resource, image)
776 remove_ami(ec2resource, image)
768
777
769 return existing_image
778 return existing_image
770
779
771
780
772 def create_ami_from_instance(ec2client, instance, name, description,
781 def create_ami_from_instance(
773 fingerprint):
782 ec2client, instance, name, description, fingerprint
783 ):
774 """Create an AMI from a running instance.
784 """Create an AMI from a running instance.
775
785
776 Returns the ``ec2resource.Image`` representing the created AMI.
786 Returns the ``ec2resource.Image`` representing the created AMI.
@@ -778,36 +788,26 b' def create_ami_from_instance(ec2client, '
778 instance.stop()
788 instance.stop()
779
789
780 ec2client.get_waiter('instance_stopped').wait(
790 ec2client.get_waiter('instance_stopped').wait(
781 InstanceIds=[instance.id],
791 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
782 WaiterConfig={
792 )
783 'Delay': 5,
784 })
785 print('%s is stopped' % instance.id)
793 print('%s is stopped' % instance.id)
786
794
787 image = instance.create_image(
795 image = instance.create_image(Name=name, Description=description,)
788 Name=name,
789 Description=description,
790 )
791
796
792 image.create_tags(Tags=[
797 image.create_tags(
793 {
798 Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
794 'Key': 'HGIMAGEFINGERPRINT',
799 )
795 'Value': fingerprint,
796 },
797 ])
798
800
799 print('waiting for image %s' % image.id)
801 print('waiting for image %s' % image.id)
800
802
801 ec2client.get_waiter('image_available').wait(
803 ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
802 ImageIds=[image.id],
803 )
804
804
805 print('image %s available as %s' % (image.id, image.name))
805 print('image %s available as %s' % (image.id, image.name))
806
806
807 return image
807 return image
808
808
809
809
810 def ensure_linux_dev_ami(c: AWSConnection, distro='debian9', prefix='hg-'):
810 def ensure_linux_dev_ami(c: AWSConnection, distro='debian10', prefix='hg-'):
811 """Ensures a Linux development AMI is available and up-to-date.
811 """Ensures a Linux development AMI is available and up-to-date.
812
812
813 Returns an ``ec2.Image`` of either an existing AMI or a newly-built one.
813 Returns an ``ec2.Image`` of either an existing AMI or a newly-built one.
@@ -821,28 +821,26 b' def ensure_linux_dev_ami(c: AWSConnectio'
821 image = find_image(
821 image = find_image(
822 ec2resource,
822 ec2resource,
823 DEBIAN_ACCOUNT_ID,
823 DEBIAN_ACCOUNT_ID,
824 'debian-stretch-hvm-x86_64-gp2-2019-02-19-26620',
824 'debian-stretch-hvm-x86_64-gp2-2019-09-08-17994',
825 )
826 ssh_username = 'admin'
827 elif distro == 'debian10':
828 image = find_image(
829 ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
825 )
830 )
826 ssh_username = 'admin'
831 ssh_username = 'admin'
827 elif distro == 'ubuntu18.04':
832 elif distro == 'ubuntu18.04':
828 image = find_image(
833 image = find_image(
829 ec2resource,
834 ec2resource,
830 UBUNTU_ACCOUNT_ID,
835 UBUNTU_ACCOUNT_ID,
831 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190403',
836 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190918',
832 )
833 ssh_username = 'ubuntu'
834 elif distro == 'ubuntu18.10':
835 image = find_image(
836 ec2resource,
837 UBUNTU_ACCOUNT_ID,
838 'ubuntu/images/hvm-ssd/ubuntu-cosmic-18.10-amd64-server-20190402',
839 )
837 )
840 ssh_username = 'ubuntu'
838 ssh_username = 'ubuntu'
841 elif distro == 'ubuntu19.04':
839 elif distro == 'ubuntu19.04':
842 image = find_image(
840 image = find_image(
843 ec2resource,
841 ec2resource,
844 UBUNTU_ACCOUNT_ID,
842 UBUNTU_ACCOUNT_ID,
845 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190417',
843 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190918',
846 )
844 )
847 ssh_username = 'ubuntu'
845 ssh_username = 'ubuntu'
848 else:
846 else:
@@ -854,7 +852,7 b' def ensure_linux_dev_ami(c: AWSConnectio'
854 'DeviceName': image.block_device_mappings[0]['DeviceName'],
852 'DeviceName': image.block_device_mappings[0]['DeviceName'],
855 'Ebs': {
853 'Ebs': {
856 'DeleteOnTermination': True,
854 'DeleteOnTermination': True,
857 'VolumeSize': 8,
855 'VolumeSize': 10,
858 'VolumeType': 'gp2',
856 'VolumeType': 'gp2',
859 },
857 },
860 },
858 },
@@ -870,10 +868,12 b' def ensure_linux_dev_ami(c: AWSConnectio'
870 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
868 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
871 }
869 }
872
870
873 requirements2_path = (pathlib.Path(__file__).parent.parent /
871 requirements2_path = (
874 'linux-requirements-py2.txt')
872 pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt'
875 requirements3_path = (pathlib.Path(__file__).parent.parent /
873 )
876 'linux-requirements-py3.txt')
874 requirements3_path = (
875 pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt'
876 )
877 with requirements2_path.open('r', encoding='utf-8') as fh:
877 with requirements2_path.open('r', encoding='utf-8') as fh:
878 requirements2 = fh.read()
878 requirements2 = fh.read()
879 with requirements3_path.open('r', encoding='utf-8') as fh:
879 with requirements3_path.open('r', encoding='utf-8') as fh:
@@ -881,12 +881,14 b' def ensure_linux_dev_ami(c: AWSConnectio'
881
881
882 # Compute a deterministic fingerprint to determine whether image needs to
882 # Compute a deterministic fingerprint to determine whether image needs to
883 # be regenerated.
883 # be regenerated.
884 fingerprint = resolve_fingerprint({
884 fingerprint = resolve_fingerprint(
885 'instance_config': config,
885 {
886 'bootstrap_script': BOOTSTRAP_DEBIAN,
886 'instance_config': config,
887 'requirements_py2': requirements2,
887 'bootstrap_script': BOOTSTRAP_DEBIAN,
888 'requirements_py3': requirements3,
888 'requirements_py2': requirements2,
889 })
889 'requirements_py3': requirements3,
890 }
891 )
890
892
891 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
893 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
892
894
@@ -901,9 +903,11 b' def ensure_linux_dev_ami(c: AWSConnectio'
901 instance = instances[0]
903 instance = instances[0]
902
904
903 client = wait_for_ssh(
905 client = wait_for_ssh(
904 instance.public_ip_address, 22,
906 instance.public_ip_address,
907 22,
905 username=ssh_username,
908 username=ssh_username,
906 key_filename=str(c.key_pair_path_private('automation')))
909 key_filename=str(c.key_pair_path_private('automation')),
910 )
907
911
908 home = '/home/%s' % ssh_username
912 home = '/home/%s' % ssh_username
909
913
@@ -925,8 +929,9 b' def ensure_linux_dev_ami(c: AWSConnectio'
925 fh.chmod(0o0700)
929 fh.chmod(0o0700)
926
930
927 print('executing bootstrap')
931 print('executing bootstrap')
928 chan, stdin, stdout = ssh_exec_command(client,
932 chan, stdin, stdout = ssh_exec_command(
929 '%s/bootstrap' % home)
933 client, '%s/bootstrap' % home
934 )
930 stdin.close()
935 stdin.close()
931
936
932 for line in stdout:
937 for line in stdout:
@@ -936,17 +941,28 b' def ensure_linux_dev_ami(c: AWSConnectio'
936 if res:
941 if res:
937 raise Exception('non-0 exit from bootstrap: %d' % res)
942 raise Exception('non-0 exit from bootstrap: %d' % res)
938
943
939 print('bootstrap completed; stopping %s to create %s' % (
944 print(
940 instance.id, name))
945 'bootstrap completed; stopping %s to create %s'
946 % (instance.id, name)
947 )
941
948
942 return create_ami_from_instance(ec2client, instance, name,
949 return create_ami_from_instance(
943 'Mercurial Linux development environment',
950 ec2client,
944 fingerprint)
951 instance,
952 name,
953 'Mercurial Linux development environment',
954 fingerprint,
955 )
945
956
946
957
947 @contextlib.contextmanager
958 @contextlib.contextmanager
948 def temporary_linux_dev_instances(c: AWSConnection, image, instance_type,
959 def temporary_linux_dev_instances(
949 prefix='hg-', ensure_extra_volume=False):
960 c: AWSConnection,
961 image,
962 instance_type,
963 prefix='hg-',
964 ensure_extra_volume=False,
965 ):
950 """Create temporary Linux development EC2 instances.
966 """Create temporary Linux development EC2 instances.
951
967
952 Context manager resolves to a list of ``ec2.Instance`` that were created
968 Context manager resolves to a list of ``ec2.Instance`` that were created
@@ -970,7 +986,7 b' def temporary_linux_dev_instances(c: AWS'
970 'DeviceName': image.block_device_mappings[0]['DeviceName'],
986 'DeviceName': image.block_device_mappings[0]['DeviceName'],
971 'Ebs': {
987 'Ebs': {
972 'DeleteOnTermination': True,
988 'DeleteOnTermination': True,
973 'VolumeSize': 8,
989 'VolumeSize': 12,
974 'VolumeType': 'gp2',
990 'VolumeType': 'gp2',
975 },
991 },
976 }
992 }
@@ -978,8 +994,9 b' def temporary_linux_dev_instances(c: AWS'
978
994
979 # This is not an exhaustive list of instance types having instance storage.
995 # This is not an exhaustive list of instance types having instance storage.
980 # But
996 # But
981 if (ensure_extra_volume
997 if ensure_extra_volume and not instance_type.startswith(
982 and not instance_type.startswith(tuple(INSTANCE_TYPES_WITH_STORAGE))):
998 tuple(INSTANCE_TYPES_WITH_STORAGE)
999 ):
983 main_device = block_device_mappings[0]['DeviceName']
1000 main_device = block_device_mappings[0]['DeviceName']
984
1001
985 if main_device == 'xvda':
1002 if main_device == 'xvda':
@@ -987,17 +1004,20 b' def temporary_linux_dev_instances(c: AWS'
987 elif main_device == '/dev/sda1':
1004 elif main_device == '/dev/sda1':
988 second_device = '/dev/sdb'
1005 second_device = '/dev/sdb'
989 else:
1006 else:
990 raise ValueError('unhandled primary EBS device name: %s' %
1007 raise ValueError(
991 main_device)
1008 'unhandled primary EBS device name: %s' % main_device
1009 )
992
1010
993 block_device_mappings.append({
1011 block_device_mappings.append(
994 'DeviceName': second_device,
1012 {
995 'Ebs': {
1013 'DeviceName': second_device,
996 'DeleteOnTermination': True,
1014 'Ebs': {
997 'VolumeSize': 8,
1015 'DeleteOnTermination': True,
998 'VolumeType': 'gp2',
1016 'VolumeSize': 8,
1017 'VolumeType': 'gp2',
1018 },
999 }
1019 }
1000 })
1020 )
1001
1021
1002 config = {
1022 config = {
1003 'BlockDeviceMappings': block_device_mappings,
1023 'BlockDeviceMappings': block_device_mappings,
@@ -1018,9 +1038,11 b' def temporary_linux_dev_instances(c: AWS'
1018
1038
1019 for instance in instances:
1039 for instance in instances:
1020 client = wait_for_ssh(
1040 client = wait_for_ssh(
1021 instance.public_ip_address, 22,
1041 instance.public_ip_address,
1042 22,
1022 username='hg',
1043 username='hg',
1023 key_filename=ssh_private_key_path)
1044 key_filename=ssh_private_key_path,
1045 )
1024
1046
1025 instance.ssh_client = client
1047 instance.ssh_client = client
1026 instance.ssh_private_key_path = ssh_private_key_path
1048 instance.ssh_private_key_path = ssh_private_key_path
@@ -1032,8 +1054,9 b' def temporary_linux_dev_instances(c: AWS'
1032 instance.ssh_client.close()
1054 instance.ssh_client.close()
1033
1055
1034
1056
1035 def ensure_windows_dev_ami(c: AWSConnection, prefix='hg-',
1057 def ensure_windows_dev_ami(
1036 base_image_name=WINDOWS_BASE_IMAGE_NAME):
1058 c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME
1059 ):
1037 """Ensure Windows Development AMI is available and up-to-date.
1060 """Ensure Windows Development AMI is available and up-to-date.
1038
1061
1039 If necessary, a modern AMI will be built by starting a temporary EC2
1062 If necessary, a modern AMI will be built by starting a temporary EC2
@@ -1092,6 +1115,23 b' def ensure_windows_dev_ami(c: AWSConnect'
1092 with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh:
1115 with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh:
1093 commands.extend(l.rstrip() for l in fh)
1116 commands.extend(l.rstrip() for l in fh)
1094
1117
1118 # Schedule run of EC2Launch on next boot. This ensures that UserData
1119 # is executed.
1120 # We disable setComputerName because it forces a reboot.
1121 # We set an explicit admin password because this causes UserData to run
1122 # as Administrator instead of System.
1123 commands.extend(
1124 [
1125 r'''Set-Content -Path C:\ProgramData\Amazon\EC2-Windows\Launch\Config\LaunchConfig.json '''
1126 r'''-Value '{"setComputerName": false, "setWallpaper": true, "addDnsSuffixList": true, '''
1127 r'''"extendBootVolumeSize": true, "handleUserData": true, '''
1128 r'''"adminPasswordType": "Specify", "adminPassword": "%s"}' '''
1129 % c.automation.default_password(),
1130 r'C:\ProgramData\Amazon\EC2-Windows\Launch\Scripts\InitializeInstance.ps1 '
1131 r'–Schedule',
1132 ]
1133 )
1134
1095 # Disable Windows Defender when bootstrapping because it just slows
1135 # Disable Windows Defender when bootstrapping because it just slows
1096 # things down.
1136 # things down.
1097 commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true')
1137 commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true')
@@ -1099,13 +1139,15 b' def ensure_windows_dev_ami(c: AWSConnect'
1099
1139
1100 # Compute a deterministic fingerprint to determine whether image needs
1140 # Compute a deterministic fingerprint to determine whether image needs
1101 # to be regenerated.
1141 # to be regenerated.
1102 fingerprint = resolve_fingerprint({
1142 fingerprint = resolve_fingerprint(
1103 'instance_config': config,
1143 {
1104 'user_data': WINDOWS_USER_DATA,
1144 'instance_config': config,
1105 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
1145 'user_data': WINDOWS_USER_DATA,
1106 'bootstrap_commands': commands,
1146 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
1107 'base_image_name': base_image_name,
1147 'bootstrap_commands': commands,
1108 })
1148 'base_image_name': base_image_name,
1149 }
1150 )
1109
1151
1110 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
1152 existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
1111
1153
@@ -1114,7 +1156,9 b' def ensure_windows_dev_ami(c: AWSConnect'
1114
1156
1115 print('no suitable Windows development image found; creating one...')
1157 print('no suitable Windows development image found; creating one...')
1116
1158
1117 with create_temp_windows_ec2_instances(c, config) as instances:
1159 with create_temp_windows_ec2_instances(
1160 c, config, bootstrap=True
1161 ) as instances:
1118 assert len(instances) == 1
1162 assert len(instances) == 1
1119 instance = instances[0]
1163 instance = instances[0]
1120
1164
@@ -1130,9 +1174,7 b' def ensure_windows_dev_ami(c: AWSConnect'
1130 ssmclient,
1174 ssmclient,
1131 [instance],
1175 [instance],
1132 'AWS-RunPowerShellScript',
1176 'AWS-RunPowerShellScript',
1133 {
1177 {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
1134 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
1135 },
1136 )
1178 )
1137
1179
1138 # Reboot so all updates are fully applied.
1180 # Reboot so all updates are fully applied.
@@ -1144,10 +1186,8 b' def ensure_windows_dev_ami(c: AWSConnect'
1144 print('rebooting instance %s' % instance.id)
1186 print('rebooting instance %s' % instance.id)
1145 instance.stop()
1187 instance.stop()
1146 ec2client.get_waiter('instance_stopped').wait(
1188 ec2client.get_waiter('instance_stopped').wait(
1147 InstanceIds=[instance.id],
1189 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
1148 WaiterConfig={
1190 )
1149 'Delay': 5,
1150 })
1151
1191
1152 instance.start()
1192 instance.start()
1153 wait_for_ip_addresses([instance])
1193 wait_for_ip_addresses([instance])
@@ -1158,8 +1198,11 b' def ensure_windows_dev_ami(c: AWSConnect'
1158 # TODO figure out a workaround.
1198 # TODO figure out a workaround.
1159
1199
1160 print('waiting for Windows Remote Management to come back...')
1200 print('waiting for Windows Remote Management to come back...')
1161 client = wait_for_winrm(instance.public_ip_address, 'Administrator',
1201 client = wait_for_winrm(
1162 c.automation.default_password())
1202 instance.public_ip_address,
1203 'Administrator',
1204 c.automation.default_password(),
1205 )
1163 print('established WinRM connection to %s' % instance.id)
1206 print('established WinRM connection to %s' % instance.id)
1164 instance.winrm_client = client
1207 instance.winrm_client = client
1165
1208
@@ -1167,14 +1210,23 b' def ensure_windows_dev_ami(c: AWSConnect'
1167 run_powershell(instance.winrm_client, '\n'.join(commands))
1210 run_powershell(instance.winrm_client, '\n'.join(commands))
1168
1211
1169 print('bootstrap completed; stopping %s to create image' % instance.id)
1212 print('bootstrap completed; stopping %s to create image' % instance.id)
1170 return create_ami_from_instance(ec2client, instance, name,
1213 return create_ami_from_instance(
1171 'Mercurial Windows development environment',
1214 ec2client,
1172 fingerprint)
1215 instance,
1216 name,
1217 'Mercurial Windows development environment',
1218 fingerprint,
1219 )
1173
1220
1174
1221
1175 @contextlib.contextmanager
1222 @contextlib.contextmanager
1176 def temporary_windows_dev_instances(c: AWSConnection, image, instance_type,
1223 def temporary_windows_dev_instances(
1177 prefix='hg-', disable_antivirus=False):
1224 c: AWSConnection,
1225 image,
1226 instance_type,
1227 prefix='hg-',
1228 disable_antivirus=False,
1229 ):
1178 """Create a temporary Windows development EC2 instance.
1230 """Create a temporary Windows development EC2 instance.
1179
1231
1180 Context manager resolves to the list of ``EC2.Instance`` that were created.
1232 Context manager resolves to the list of ``EC2.Instance`` that were created.
@@ -1204,6 +1256,7 b' def temporary_windows_dev_instances(c: A'
1204 for instance in instances:
1256 for instance in instances:
1205 run_powershell(
1257 run_powershell(
1206 instance.winrm_client,
1258 instance.winrm_client,
1207 'Set-MpPreference -DisableRealtimeMonitoring $true')
1259 'Set-MpPreference -DisableRealtimeMonitoring $true',
1260 )
1208
1261
1209 yield instances
1262 yield instances
@@ -17,16 +17,20 b' from . import ('
17 aws,
17 aws,
18 HGAutomation,
18 HGAutomation,
19 linux,
19 linux,
20 try_server,
20 windows,
21 windows,
21 )
22 )
22
23
23
24
24 SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent
25 SOURCE_ROOT = pathlib.Path(
26 os.path.abspath(__file__)
27 ).parent.parent.parent.parent
25 DIST_PATH = SOURCE_ROOT / 'dist'
28 DIST_PATH = SOURCE_ROOT / 'dist'
26
29
27
30
28 def bootstrap_linux_dev(hga: HGAutomation, aws_region, distros=None,
31 def bootstrap_linux_dev(
29 parallel=False):
32 hga: HGAutomation, aws_region, distros=None, parallel=False
33 ):
30 c = hga.aws_connection(aws_region)
34 c = hga.aws_connection(aws_region)
31
35
32 if distros:
36 if distros:
@@ -58,8 +62,9 b' def bootstrap_windows_dev(hga: HGAutomat'
58 print('Windows development AMI available as %s' % image.id)
62 print('Windows development AMI available as %s' % image.id)
59
63
60
64
61 def build_inno(hga: HGAutomation, aws_region, arch, revision, version,
65 def build_inno(
62 base_image_name):
66 hga: HGAutomation, aws_region, arch, revision, version, base_image_name
67 ):
63 c = hga.aws_connection(aws_region)
68 c = hga.aws_connection(aws_region)
64 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
69 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
65 DIST_PATH.mkdir(exist_ok=True)
70 DIST_PATH.mkdir(exist_ok=True)
@@ -70,13 +75,14 b' def build_inno(hga: HGAutomation, aws_re'
70 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
75 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
71
76
72 for a in arch:
77 for a in arch:
73 windows.build_inno_installer(instance.winrm_client, a,
78 windows.build_inno_installer(
74 DIST_PATH,
79 instance.winrm_client, a, DIST_PATH, version=version
75 version=version)
80 )
76
81
77
82
78 def build_wix(hga: HGAutomation, aws_region, arch, revision, version,
83 def build_wix(
79 base_image_name):
84 hga: HGAutomation, aws_region, arch, revision, version, base_image_name
85 ):
80 c = hga.aws_connection(aws_region)
86 c = hga.aws_connection(aws_region)
81 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
87 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
82 DIST_PATH.mkdir(exist_ok=True)
88 DIST_PATH.mkdir(exist_ok=True)
@@ -87,12 +93,14 b' def build_wix(hga: HGAutomation, aws_reg'
87 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
93 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
88
94
89 for a in arch:
95 for a in arch:
90 windows.build_wix_installer(instance.winrm_client, a,
96 windows.build_wix_installer(
91 DIST_PATH, version=version)
97 instance.winrm_client, a, DIST_PATH, version=version
98 )
92
99
93
100
94 def build_windows_wheel(hga: HGAutomation, aws_region, arch, revision,
101 def build_windows_wheel(
95 base_image_name):
102 hga: HGAutomation, aws_region, arch, revision, base_image_name
103 ):
96 c = hga.aws_connection(aws_region)
104 c = hga.aws_connection(aws_region)
97 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
105 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
98 DIST_PATH.mkdir(exist_ok=True)
106 DIST_PATH.mkdir(exist_ok=True)
@@ -106,8 +114,9 b' def build_windows_wheel(hga: HGAutomatio'
106 windows.build_wheel(instance.winrm_client, a, DIST_PATH)
114 windows.build_wheel(instance.winrm_client, a, DIST_PATH)
107
115
108
116
109 def build_all_windows_packages(hga: HGAutomation, aws_region, revision,
117 def build_all_windows_packages(
110 version, base_image_name):
118 hga: HGAutomation, aws_region, revision, version, base_image_name
119 ):
111 c = hga.aws_connection(aws_region)
120 c = hga.aws_connection(aws_region)
112 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
121 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
113 DIST_PATH.mkdir(exist_ok=True)
122 DIST_PATH.mkdir(exist_ok=True)
@@ -123,11 +132,13 b' def build_all_windows_packages(hga: HGAu'
123 windows.purge_hg(winrm_client)
132 windows.purge_hg(winrm_client)
124 windows.build_wheel(winrm_client, arch, DIST_PATH)
133 windows.build_wheel(winrm_client, arch, DIST_PATH)
125 windows.purge_hg(winrm_client)
134 windows.purge_hg(winrm_client)
126 windows.build_inno_installer(winrm_client, arch, DIST_PATH,
135 windows.build_inno_installer(
127 version=version)
136 winrm_client, arch, DIST_PATH, version=version
137 )
128 windows.purge_hg(winrm_client)
138 windows.purge_hg(winrm_client)
129 windows.build_wix_installer(winrm_client, arch, DIST_PATH,
139 windows.build_wix_installer(
130 version=version)
140 winrm_client, arch, DIST_PATH, version=version
141 )
131
142
132
143
133 def terminate_ec2_instances(hga: HGAutomation, aws_region):
144 def terminate_ec2_instances(hga: HGAutomation, aws_region):
@@ -140,8 +151,15 b' def purge_ec2_resources(hga: HGAutomatio'
140 aws.remove_resources(c)
151 aws.remove_resources(c)
141
152
142
153
143 def run_tests_linux(hga: HGAutomation, aws_region, instance_type,
154 def run_tests_linux(
144 python_version, test_flags, distro, filesystem):
155 hga: HGAutomation,
156 aws_region,
157 instance_type,
158 python_version,
159 test_flags,
160 distro,
161 filesystem,
162 ):
145 c = hga.aws_connection(aws_region)
163 c = hga.aws_connection(aws_region)
146 image = aws.ensure_linux_dev_ami(c, distro=distro)
164 image = aws.ensure_linux_dev_ami(c, distro=distro)
147
165
@@ -150,17 +168,17 b' def run_tests_linux(hga: HGAutomation, a'
150 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
168 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
151
169
152 with aws.temporary_linux_dev_instances(
170 with aws.temporary_linux_dev_instances(
153 c, image, instance_type,
171 c, image, instance_type, ensure_extra_volume=ensure_extra_volume
154 ensure_extra_volume=ensure_extra_volume) as insts:
172 ) as insts:
155
173
156 instance = insts[0]
174 instance = insts[0]
157
175
158 linux.prepare_exec_environment(instance.ssh_client,
176 linux.prepare_exec_environment(
159 filesystem=filesystem)
177 instance.ssh_client, filesystem=filesystem
178 )
160 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
179 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
161 t_prepared = time.time()
180 t_prepared = time.time()
162 linux.run_tests(instance.ssh_client, python_version,
181 linux.run_tests(instance.ssh_client, python_version, test_flags)
163 test_flags)
164 t_done = time.time()
182 t_done = time.time()
165
183
166 t_setup = t_prepared - t_start
184 t_setup = t_prepared - t_start
@@ -168,21 +186,53 b' def run_tests_linux(hga: HGAutomation, a'
168
186
169 print(
187 print(
170 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
188 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
171 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0))
189 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
190 )
172
191
173
192
174 def run_tests_windows(hga: HGAutomation, aws_region, instance_type,
193 def run_tests_windows(
175 python_version, arch, test_flags, base_image_name):
194 hga: HGAutomation,
195 aws_region,
196 instance_type,
197 python_version,
198 arch,
199 test_flags,
200 base_image_name,
201 ):
176 c = hga.aws_connection(aws_region)
202 c = hga.aws_connection(aws_region)
177 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
203 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
178
204
179 with aws.temporary_windows_dev_instances(c, image, instance_type,
205 with aws.temporary_windows_dev_instances(
180 disable_antivirus=True) as insts:
206 c, image, instance_type, disable_antivirus=True
207 ) as insts:
181 instance = insts[0]
208 instance = insts[0]
182
209
183 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
210 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
184 windows.run_tests(instance.winrm_client, python_version, arch,
211 windows.run_tests(
185 test_flags)
212 instance.winrm_client, python_version, arch, test_flags
213 )
214
215
216 def publish_windows_artifacts(
217 hg: HGAutomation,
218 aws_region,
219 version: str,
220 pypi: bool,
221 mercurial_scm_org: bool,
222 ssh_username: str,
223 ):
224 windows.publish_artifacts(
225 DIST_PATH,
226 version,
227 pypi=pypi,
228 mercurial_scm_org=mercurial_scm_org,
229 ssh_username=ssh_username,
230 )
231
232
233 def run_try(hga: HGAutomation, aws_region: str, rev: str):
234 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
235 try_server.trigger_try(c, rev=rev)
186
236
187
237
188 def get_parser():
238 def get_parser():
@@ -194,25 +244,21 b' def get_parser():'
194 help='Path for local state files',
244 help='Path for local state files',
195 )
245 )
196 parser.add_argument(
246 parser.add_argument(
197 '--aws-region',
247 '--aws-region', help='AWS region to use', default='us-west-2',
198 help='AWS region to use',
199 default='us-west-1',
200 )
248 )
201
249
202 subparsers = parser.add_subparsers()
250 subparsers = parser.add_subparsers()
203
251
204 sp = subparsers.add_parser(
252 sp = subparsers.add_parser(
205 'bootstrap-linux-dev',
253 'bootstrap-linux-dev', help='Bootstrap Linux development environments',
206 help='Bootstrap Linux development environments',
207 )
254 )
208 sp.add_argument(
255 sp.add_argument(
209 '--distros',
256 '--distros', help='Comma delimited list of distros to bootstrap',
210 help='Comma delimited list of distros to bootstrap',
211 )
257 )
212 sp.add_argument(
258 sp.add_argument(
213 '--parallel',
259 '--parallel',
214 action='store_true',
260 action='store_true',
215 help='Generate AMIs in parallel (not CTRL-c safe)'
261 help='Generate AMIs in parallel (not CTRL-c safe)',
216 )
262 )
217 sp.set_defaults(func=bootstrap_linux_dev)
263 sp.set_defaults(func=bootstrap_linux_dev)
218
264
@@ -228,17 +274,13 b' def get_parser():'
228 sp.set_defaults(func=bootstrap_windows_dev)
274 sp.set_defaults(func=bootstrap_windows_dev)
229
275
230 sp = subparsers.add_parser(
276 sp = subparsers.add_parser(
231 'build-all-windows-packages',
277 'build-all-windows-packages', help='Build all Windows packages',
232 help='Build all Windows packages',
233 )
278 )
234 sp.add_argument(
279 sp.add_argument(
235 '--revision',
280 '--revision', help='Mercurial revision to build', default='.',
236 help='Mercurial revision to build',
237 default='.',
238 )
281 )
239 sp.add_argument(
282 sp.add_argument(
240 '--version',
283 '--version', help='Mercurial version string to use',
241 help='Mercurial version string to use',
242 )
284 )
243 sp.add_argument(
285 sp.add_argument(
244 '--base-image-name',
286 '--base-image-name',
@@ -248,8 +290,7 b' def get_parser():'
248 sp.set_defaults(func=build_all_windows_packages)
290 sp.set_defaults(func=build_all_windows_packages)
249
291
250 sp = subparsers.add_parser(
292 sp = subparsers.add_parser(
251 'build-inno',
293 'build-inno', help='Build Inno Setup installer(s)',
252 help='Build Inno Setup installer(s)',
253 )
294 )
254 sp.add_argument(
295 sp.add_argument(
255 '--arch',
296 '--arch',
@@ -259,13 +300,10 b' def get_parser():'
259 default=['x64'],
300 default=['x64'],
260 )
301 )
261 sp.add_argument(
302 sp.add_argument(
262 '--revision',
303 '--revision', help='Mercurial revision to build', default='.',
263 help='Mercurial revision to build',
264 default='.',
265 )
304 )
266 sp.add_argument(
305 sp.add_argument(
267 '--version',
306 '--version', help='Mercurial version string to use in installer',
268 help='Mercurial version string to use in installer',
269 )
307 )
270 sp.add_argument(
308 sp.add_argument(
271 '--base-image-name',
309 '--base-image-name',
@@ -275,8 +313,7 b' def get_parser():'
275 sp.set_defaults(func=build_inno)
313 sp.set_defaults(func=build_inno)
276
314
277 sp = subparsers.add_parser(
315 sp = subparsers.add_parser(
278 'build-windows-wheel',
316 'build-windows-wheel', help='Build Windows wheel(s)',
279 help='Build Windows wheel(s)',
280 )
317 )
281 sp.add_argument(
318 sp.add_argument(
282 '--arch',
319 '--arch',
@@ -286,9 +323,7 b' def get_parser():'
286 default=['x64'],
323 default=['x64'],
287 )
324 )
288 sp.add_argument(
325 sp.add_argument(
289 '--revision',
326 '--revision', help='Mercurial revision to build', default='.',
290 help='Mercurial revision to build',
291 default='.',
292 )
327 )
293 sp.add_argument(
328 sp.add_argument(
294 '--base-image-name',
329 '--base-image-name',
@@ -297,10 +332,7 b' def get_parser():'
297 )
332 )
298 sp.set_defaults(func=build_windows_wheel)
333 sp.set_defaults(func=build_windows_wheel)
299
334
300 sp = subparsers.add_parser(
335 sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
301 'build-wix',
302 help='Build WiX installer(s)'
303 )
304 sp.add_argument(
336 sp.add_argument(
305 '--arch',
337 '--arch',
306 help='Architecture to build for',
338 help='Architecture to build for',
@@ -309,13 +341,10 b' def get_parser():'
309 default=['x64'],
341 default=['x64'],
310 )
342 )
311 sp.add_argument(
343 sp.add_argument(
312 '--revision',
344 '--revision', help='Mercurial revision to build', default='.',
313 help='Mercurial revision to build',
314 default='.',
315 )
345 )
316 sp.add_argument(
346 sp.add_argument(
317 '--version',
347 '--version', help='Mercurial version string to use in installer',
318 help='Mercurial version string to use in installer',
319 )
348 )
320 sp.add_argument(
349 sp.add_argument(
321 '--base-image-name',
350 '--base-image-name',
@@ -331,20 +360,16 b' def get_parser():'
331 sp.set_defaults(func=terminate_ec2_instances)
360 sp.set_defaults(func=terminate_ec2_instances)
332
361
333 sp = subparsers.add_parser(
362 sp = subparsers.add_parser(
334 'purge-ec2-resources',
363 'purge-ec2-resources', help='Purge all EC2 resources managed by us',
335 help='Purge all EC2 resources managed by us',
336 )
364 )
337 sp.set_defaults(func=purge_ec2_resources)
365 sp.set_defaults(func=purge_ec2_resources)
338
366
339 sp = subparsers.add_parser(
367 sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
340 'run-tests-linux',
341 help='Run tests on Linux',
342 )
343 sp.add_argument(
368 sp.add_argument(
344 '--distro',
369 '--distro',
345 help='Linux distribution to run tests on',
370 help='Linux distribution to run tests on',
346 choices=linux.DISTROS,
371 choices=linux.DISTROS,
347 default='debian9',
372 default='debian10',
348 )
373 )
349 sp.add_argument(
374 sp.add_argument(
350 '--filesystem',
375 '--filesystem',
@@ -360,8 +385,18 b' def get_parser():'
360 sp.add_argument(
385 sp.add_argument(
361 '--python-version',
386 '--python-version',
362 help='Python version to use',
387 help='Python version to use',
363 choices={'system2', 'system3', '2.7', '3.5', '3.6', '3.7', '3.8',
388 choices={
364 'pypy', 'pypy3.5', 'pypy3.6'},
389 'system2',
390 'system3',
391 '2.7',
392 '3.5',
393 '3.6',
394 '3.7',
395 '3.8',
396 'pypy',
397 'pypy3.5',
398 'pypy3.6',
399 },
365 default='system2',
400 default='system2',
366 )
401 )
367 sp.add_argument(
402 sp.add_argument(
@@ -372,13 +407,10 b' def get_parser():'
372 sp.set_defaults(func=run_tests_linux)
407 sp.set_defaults(func=run_tests_linux)
373
408
374 sp = subparsers.add_parser(
409 sp = subparsers.add_parser(
375 'run-tests-windows',
410 'run-tests-windows', help='Run tests on Windows',
376 help='Run tests on Windows',
377 )
411 )
378 sp.add_argument(
412 sp.add_argument(
379 '--instance-type',
413 '--instance-type', help='EC2 instance type to use', default='t3.medium',
380 help='EC2 instance type to use',
381 default='t3.medium',
382 )
414 )
383 sp.add_argument(
415 sp.add_argument(
384 '--python-version',
416 '--python-version',
@@ -393,8 +425,7 b' def get_parser():'
393 default='x64',
425 default='x64',
394 )
426 )
395 sp.add_argument(
427 sp.add_argument(
396 '--test-flags',
428 '--test-flags', help='Extra command line flags to pass to run-tests.py',
397 help='Extra command line flags to pass to run-tests.py',
398 )
429 )
399 sp.add_argument(
430 sp.add_argument(
400 '--base-image-name',
431 '--base-image-name',
@@ -403,6 +434,38 b' def get_parser():'
403 )
434 )
404 sp.set_defaults(func=run_tests_windows)
435 sp.set_defaults(func=run_tests_windows)
405
436
437 sp = subparsers.add_parser(
438 'publish-windows-artifacts',
439 help='Publish built Windows artifacts (wheels, installers, etc)',
440 )
441 sp.add_argument(
442 '--no-pypi',
443 dest='pypi',
444 action='store_false',
445 default=True,
446 help='Skip uploading to PyPI',
447 )
448 sp.add_argument(
449 '--no-mercurial-scm-org',
450 dest='mercurial_scm_org',
451 action='store_false',
452 default=True,
453 help='Skip uploading to www.mercurial-scm.org',
454 )
455 sp.add_argument(
456 '--ssh-username', help='SSH username for mercurial-scm.org',
457 )
458 sp.add_argument(
459 'version', help='Mercurial version string to locate local packages',
460 )
461 sp.set_defaults(func=publish_windows_artifacts)
462
463 sp = subparsers.add_parser(
464 'try', help='Run CI automation against a custom changeset'
465 )
466 sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
467 sp.set_defaults(func=run_try)
468
406 return parser
469 return parser
407
470
408
471
@@ -13,39 +13,37 b' import shlex'
13 import subprocess
13 import subprocess
14 import tempfile
14 import tempfile
15
15
16 from .ssh import (
16 from .ssh import exec_command
17 exec_command,
18 )
19
17
20
18
21 # Linux distributions that are supported.
19 # Linux distributions that are supported.
22 DISTROS = {
20 DISTROS = {
23 'debian9',
21 'debian9',
22 'debian10',
24 'ubuntu18.04',
23 'ubuntu18.04',
25 'ubuntu18.10',
26 'ubuntu19.04',
24 'ubuntu19.04',
27 }
25 }
28
26
29 INSTALL_PYTHONS = r'''
27 INSTALL_PYTHONS = r'''
30 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
28 PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
31 PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"
29 PYENV3_VERSIONS="3.5.7 3.6.9 3.7.4 3.8.0 pypy3.5-7.0.0 pypy3.6-7.1.1"
32
30
33 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
34 pushd /hgdev/pyenv
32 pushd /hgdev/pyenv
35 git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
33 git checkout d6d6bc8bb08bcdcbf4eb79509aa7061011ade1c4
36 popd
34 popd
37
35
38 export PYENV_ROOT="/hgdev/pyenv"
36 export PYENV_ROOT="/hgdev/pyenv"
39 export PATH="$PYENV_ROOT/bin:$PATH"
37 export PATH="$PYENV_ROOT/bin:$PATH"
40
38
41 # pip 19.0.3.
39 # pip 19.2.3.
42 PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
43 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
44 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
45
43
46 VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
47 VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
48 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
49 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
50
48
51 for v in ${PYENV2_VERSIONS}; do
49 for v in ${PYENV2_VERSIONS}; do
@@ -62,23 +60,40 b' for v in ${PYENV3_VERSIONS}; do'
62 done
60 done
63
61
64 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
62 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
65 '''.lstrip().replace('\r\n', '\n')
63 '''.lstrip().replace(
64 '\r\n', '\n'
65 )
66
67
68 INSTALL_RUST = r'''
69 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
70 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
71 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
72
73 chmod +x rustup-init
74 sudo -H -u hg -g hg ./rustup-init -y
75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
76 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
77 '''
66
78
67
79
68 BOOTSTRAP_VIRTUALENV = r'''
80 BOOTSTRAP_VIRTUALENV = r'''
69 /usr/bin/virtualenv /hgdev/venv-bootstrap
81 /usr/bin/virtualenv /hgdev/venv-bootstrap
70
82
71 HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
83 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
72 HG_TARBALL=mercurial-4.9.1.tar.gz
84 HG_TARBALL=mercurial-5.1.1.tar.gz
73
85
74 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
86 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
75 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
87 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
76
88
77 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
89 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
78 '''.lstrip().replace('\r\n', '\n')
90 '''.lstrip().replace(
91 '\r\n', '\n'
92 )
79
93
80
94
81 BOOTSTRAP_DEBIAN = r'''
95 BOOTSTRAP_DEBIAN = (
96 r'''
82 #!/bin/bash
97 #!/bin/bash
83
98
84 set -ex
99 set -ex
@@ -175,18 +190,22 b' EOF'
175
190
176 sudo apt-key add docker-apt-key
191 sudo apt-key add docker-apt-key
177
192
178 if [ "$DEBIAN_VERSION" = "9.8" ]; then
193 if [ "$LSB_RELEASE" = "stretch" ]; then
179 cat << EOF | sudo tee -a /etc/apt/sources.list
194 cat << EOF | sudo tee -a /etc/apt/sources.list
180 # Need backports for clang-format-6.0
195 # Need backports for clang-format-6.0
181 deb http://deb.debian.org/debian stretch-backports main
196 deb http://deb.debian.org/debian stretch-backports main
197 EOF
198 fi
182
199
200 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
201 cat << EOF | sudo tee -a /etc/apt/sources.list
183 # Sources are useful if we want to compile things locally.
202 # Sources are useful if we want to compile things locally.
184 deb-src http://deb.debian.org/debian stretch main
203 deb-src http://deb.debian.org/debian $LSB_RELEASE main
185 deb-src http://security.debian.org/debian-security stretch/updates main
204 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
186 deb-src http://deb.debian.org/debian stretch-updates main
205 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
187 deb-src http://deb.debian.org/debian stretch-backports main
206 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
188
207
189 deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
208 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
190 EOF
209 EOF
191
210
192 elif [ "$DISTRO" = "Ubuntu" ]; then
211 elif [ "$DISTRO" = "Ubuntu" ]; then
@@ -199,6 +218,7 b' fi'
199 sudo apt-get update
218 sudo apt-get update
200
219
201 PACKAGES="\
220 PACKAGES="\
221 awscli \
202 btrfs-progs \
222 btrfs-progs \
203 build-essential \
223 build-essential \
204 bzr \
224 bzr \
@@ -207,6 +227,7 b' PACKAGES="\\'
207 darcs \
227 darcs \
208 debhelper \
228 debhelper \
209 devscripts \
229 devscripts \
230 docker-ce \
210 dpkg-dev \
231 dpkg-dev \
211 dstat \
232 dstat \
212 emacs \
233 emacs \
@@ -239,6 +260,7 b' PACKAGES="\\'
239 python-pygments \
260 python-pygments \
240 python-subversion \
261 python-subversion \
241 python-vcr \
262 python-vcr \
263 python3-boto3 \
242 python3-dev \
264 python3-dev \
243 python3-docutils \
265 python3-docutils \
244 python3-fuzzywuzzy \
266 python3-fuzzywuzzy \
@@ -259,23 +281,17 b' PACKAGES="\\'
259 zip \
281 zip \
260 zlib1g-dev"
282 zlib1g-dev"
261
283
262 if [ "$DEBIAN_VERSION" = "9.8" ]; then
284 if [ "LSB_RELEASE" = "stretch" ]; then
263 PACKAGES="$PACKAGES linux-perf"
285 PACKAGES="$PACKAGES linux-perf"
264 elif [ "$DISTRO" = "Ubuntu" ]; then
286 elif [ "$DISTRO" = "Ubuntu" ]; then
265 PACKAGES="$PACKAGES linux-tools-common"
287 PACKAGES="$PACKAGES linux-tools-common"
266 fi
288 fi
267
289
268 # Ubuntu 19.04 removes monotone.
290 # Monotone only available in older releases.
269 if [ "$LSB_RELEASE" != "disco" ]; then
291 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
270 PACKAGES="$PACKAGES monotone"
292 PACKAGES="$PACKAGES monotone"
271 fi
293 fi
272
294
273 # As of April 27, 2019, Docker hasn't published packages for
274 # Ubuntu 19.04 yet.
275 if [ "$LSB_RELEASE" != "disco" ]; then
276 PACKAGES="$PACKAGES docker-ce"
277 fi
278
279 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
295 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
280
296
281 # Create clang-format symlink so test harness finds it.
297 # Create clang-format symlink so test harness finds it.
@@ -286,6 +302,8 b' sudo mkdir /hgdev'
286 # Will be normalized to hg:hg later.
302 # Will be normalized to hg:hg later.
287 sudo chown `whoami` /hgdev
303 sudo chown `whoami` /hgdev
288
304
305 {install_rust}
306
289 cp requirements-py2.txt /hgdev/requirements-py2.txt
307 cp requirements-py2.txt /hgdev/requirements-py2.txt
290 cp requirements-py3.txt /hgdev/requirements-py3.txt
308 cp requirements-py3.txt /hgdev/requirements-py3.txt
291
309
@@ -308,10 +326,14 b' publish = false'
308 EOF
326 EOF
309
327
310 sudo chown -R hg:hg /hgdev
328 sudo chown -R hg:hg /hgdev
311 '''.lstrip().format(
329 '''.lstrip()
312 install_pythons=INSTALL_PYTHONS,
330 .format(
313 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
331 install_rust=INSTALL_RUST,
314 ).replace('\r\n', '\n')
332 install_pythons=INSTALL_PYTHONS,
333 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
334 )
335 .replace('\r\n', '\n')
336 )
315
337
316
338
317 # Prepares /hgdev for operations.
339 # Prepares /hgdev for operations.
@@ -393,7 +415,9 b' mkdir /hgwork/tmp'
393 chown hg:hg /hgwork/tmp
415 chown hg:hg /hgwork/tmp
394
416
395 rsync -a /hgdev/src /hgwork/
417 rsync -a /hgdev/src /hgwork/
396 '''.lstrip().replace('\r\n', '\n')
418 '''.lstrip().replace(
419 '\r\n', '\n'
420 )
397
421
398
422
399 HG_UPDATE_CLEAN = '''
423 HG_UPDATE_CLEAN = '''
@@ -405,7 +429,9 b' cd /hgwork/src'
405 ${HG} --config extensions.purge= purge --all
429 ${HG} --config extensions.purge= purge --all
406 ${HG} update -C $1
430 ${HG} update -C $1
407 ${HG} log -r .
431 ${HG} log -r .
408 '''.lstrip().replace('\r\n', '\n')
432 '''.lstrip().replace(
433 '\r\n', '\n'
434 )
409
435
410
436
411 def prepare_exec_environment(ssh_client, filesystem='default'):
437 def prepare_exec_environment(ssh_client, filesystem='default'):
@@ -440,11 +466,12 b' def prepare_exec_environment(ssh_client,'
440 res = chan.recv_exit_status()
466 res = chan.recv_exit_status()
441
467
442 if res:
468 if res:
443 raise Exception('non-0 exit code updating working directory; %d'
469 raise Exception('non-0 exit code updating working directory; %d' % res)
444 % res)
445
470
446
471
447 def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
472 def synchronize_hg(
473 source_path: pathlib.Path, ec2_instance, revision: str = None
474 ):
448 """Synchronize a local Mercurial source path to remote EC2 instance."""
475 """Synchronize a local Mercurial source path to remote EC2 instance."""
449
476
450 with tempfile.TemporaryDirectory() as temp_dir:
477 with tempfile.TemporaryDirectory() as temp_dir:
@@ -466,8 +493,10 b' def synchronize_hg(source_path: pathlib.'
466 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
493 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
467
494
468 if not (source_path / '.hg').is_dir():
495 if not (source_path / '.hg').is_dir():
469 raise Exception('%s is not a Mercurial repository; synchronization '
496 raise Exception(
470 'not yet supported' % source_path)
497 '%s is not a Mercurial repository; synchronization '
498 'not yet supported' % source_path
499 )
471
500
472 env = dict(os.environ)
501 env = dict(os.environ)
473 env['HGPLAIN'] = '1'
502 env['HGPLAIN'] = '1'
@@ -477,17 +506,29 b' def synchronize_hg(source_path: pathlib.'
477
506
478 res = subprocess.run(
507 res = subprocess.run(
479 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
508 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
480 cwd=str(source_path), env=env, check=True, capture_output=True)
509 cwd=str(source_path),
510 env=env,
511 check=True,
512 capture_output=True,
513 )
481
514
482 full_revision = res.stdout.decode('ascii')
515 full_revision = res.stdout.decode('ascii')
483
516
484 args = [
517 args = [
485 'python2.7', str(hg_bin),
518 'python2.7',
486 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
519 str(hg_bin),
487 '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
520 '--config',
521 'ui.ssh=ssh -F %s' % ssh_config,
522 '--config',
523 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
488 # Also ensure .hgtags changes are present so auto version
524 # Also ensure .hgtags changes are present so auto version
489 # calculation works.
525 # calculation works.
490 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
526 'push',
527 '-f',
528 '-r',
529 full_revision,
530 '-r',
531 'file(.hgtags)',
491 'ssh://%s//hgwork/src' % public_ip,
532 'ssh://%s//hgwork/src' % public_ip,
492 ]
533 ]
493
534
@@ -506,7 +547,8 b' def synchronize_hg(source_path: pathlib.'
506 fh.chmod(0o0700)
547 fh.chmod(0o0700)
507
548
508 chan, stdin, stdout = exec_command(
549 chan, stdin, stdout = exec_command(
509 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
550 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
551 )
510 stdin.close()
552 stdin.close()
511
553
512 for line in stdout:
554 for line in stdout:
@@ -515,8 +557,9 b' def synchronize_hg(source_path: pathlib.'
515 res = chan.recv_exit_status()
557 res = chan.recv_exit_status()
516
558
517 if res:
559 if res:
518 raise Exception('non-0 exit code updating working directory; %d'
560 raise Exception(
519 % res)
561 'non-0 exit code updating working directory; %d' % res
562 )
520
563
521
564
522 def run_tests(ssh_client, python_version, test_flags=None):
565 def run_tests(ssh_client, python_version, test_flags=None):
@@ -538,8 +581,8 b' def run_tests(ssh_client, python_version'
538
581
539 command = (
582 command = (
540 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
583 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
541 'cd /hgwork/src/tests && %s run-tests.py %s"' % (
584 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
542 python, test_flags))
585 )
543
586
544 chan, stdin, stdout = exec_command(ssh_client, command)
587 chan, stdin, stdout = exec_command(ssh_client, command)
545
588
@@ -11,14 +11,13 b' import socket'
11 import time
11 import time
12 import warnings
12 import warnings
13
13
14 from cryptography.utils import (
14 from cryptography.utils import CryptographyDeprecationWarning
15 CryptographyDeprecationWarning,
16 )
17 import paramiko
15 import paramiko
18
16
19
17
20 def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
18 def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
21 """Wait for an SSH server to start on the specified host and port."""
19 """Wait for an SSH server to start on the specified host and port."""
20
22 class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
21 class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
23 def missing_host_key(self, client, hostname, key):
22 def missing_host_key(self, client, hostname, key):
24 return
23 return
@@ -28,17 +27,23 b' def wait_for_ssh(hostname, port, timeout'
28 # paramiko triggers a CryptographyDeprecationWarning in the cryptography
27 # paramiko triggers a CryptographyDeprecationWarning in the cryptography
29 # package. Let's suppress
28 # package. Let's suppress
30 with warnings.catch_warnings():
29 with warnings.catch_warnings():
31 warnings.filterwarnings('ignore',
30 warnings.filterwarnings(
32 category=CryptographyDeprecationWarning)
31 'ignore', category=CryptographyDeprecationWarning
32 )
33
33
34 while True:
34 while True:
35 client = paramiko.SSHClient()
35 client = paramiko.SSHClient()
36 client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
36 client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
37 try:
37 try:
38 client.connect(hostname, port=port, username=username,
38 client.connect(
39 key_filename=key_filename,
39 hostname,
40 timeout=5.0, allow_agent=False,
40 port=port,
41 look_for_keys=False)
41 username=username,
42 key_filename=key_filename,
43 timeout=5.0,
44 allow_agent=False,
45 look_for_keys=False,
46 )
42
47
43 return client
48 return client
44 except socket.error:
49 except socket.error:
@@ -7,15 +7,16 b''
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import datetime
10 import os
11 import os
12 import paramiko
11 import pathlib
13 import pathlib
12 import re
14 import re
13 import subprocess
15 import subprocess
14 import tempfile
16 import tempfile
15
17
16 from .winrm import (
18 from .pypi import upload as pypi_upload
17 run_powershell,
19 from .winrm import run_powershell
18 )
19
20
20
21
21 # PowerShell commands to activate a Visual Studio 2008 environment.
22 # PowerShell commands to activate a Visual Studio 2008 environment.
@@ -100,6 +101,33 b' if ($LASTEXITCODE -ne 0) {{'
100 }}
101 }}
101 '''
102 '''
102
103
104 X86_WHEEL_FILENAME = 'mercurial-{version}-cp27-cp27m-win32.whl'
105 X64_WHEEL_FILENAME = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
106 X86_EXE_FILENAME = 'Mercurial-{version}.exe'
107 X64_EXE_FILENAME = 'Mercurial-{version}-x64.exe'
108 X86_MSI_FILENAME = 'mercurial-{version}-x86.msi'
109 X64_MSI_FILENAME = 'mercurial-{version}-x64.msi'
110
111 MERCURIAL_SCM_BASE_URL = 'https://mercurial-scm.org/release/windows'
112
113 X86_USER_AGENT_PATTERN = '.*Windows.*'
114 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
115
116 X86_EXE_DESCRIPTION = (
117 'Mercurial {version} Inno Setup installer - x86 Windows '
118 '- does not require admin rights'
119 )
120 X64_EXE_DESCRIPTION = (
121 'Mercurial {version} Inno Setup installer - x64 Windows '
122 '- does not require admin rights'
123 )
124 X86_MSI_DESCRIPTION = (
125 'Mercurial {version} MSI installer - x86 Windows ' '- requires admin rights'
126 )
127 X64_MSI_DESCRIPTION = (
128 'Mercurial {version} MSI installer - x64 Windows ' '- requires admin rights'
129 )
130
103
131
104 def get_vc_prefix(arch):
132 def get_vc_prefix(arch):
105 if arch == 'x86':
133 if arch == 'x86':
@@ -133,10 +161,21 b' def synchronize_hg(hg_repo: pathlib.Path'
133 ssh_dir.chmod(0o0700)
161 ssh_dir.chmod(0o0700)
134
162
135 # Generate SSH key to use for communication.
163 # Generate SSH key to use for communication.
136 subprocess.run([
164 subprocess.run(
137 'ssh-keygen', '-t', 'rsa', '-b', '4096', '-N', '',
165 [
138 '-f', str(ssh_dir / 'id_rsa')],
166 'ssh-keygen',
139 check=True, capture_output=True)
167 '-t',
168 'rsa',
169 '-b',
170 '4096',
171 '-N',
172 '',
173 '-f',
174 str(ssh_dir / 'id_rsa'),
175 ],
176 check=True,
177 capture_output=True,
178 )
140
179
141 # Add it to ~/.ssh/authorized_keys on remote.
180 # Add it to ~/.ssh/authorized_keys on remote.
142 # This assumes the file doesn't already exist.
181 # This assumes the file doesn't already exist.
@@ -157,8 +196,10 b' def synchronize_hg(hg_repo: pathlib.Path'
157 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
196 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
158
197
159 if not (hg_repo / '.hg').is_dir():
198 if not (hg_repo / '.hg').is_dir():
160 raise Exception('%s is not a Mercurial repository; '
199 raise Exception(
161 'synchronization not yet supported' % hg_repo)
200 '%s is not a Mercurial repository; '
201 'synchronization not yet supported' % hg_repo
202 )
162
203
163 env = dict(os.environ)
204 env = dict(os.environ)
164 env['HGPLAIN'] = '1'
205 env['HGPLAIN'] = '1'
@@ -168,17 +209,29 b' def synchronize_hg(hg_repo: pathlib.Path'
168
209
169 res = subprocess.run(
210 res = subprocess.run(
170 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
211 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
171 cwd=str(hg_repo), env=env, check=True, capture_output=True)
212 cwd=str(hg_repo),
213 env=env,
214 check=True,
215 capture_output=True,
216 )
172
217
173 full_revision = res.stdout.decode('ascii')
218 full_revision = res.stdout.decode('ascii')
174
219
175 args = [
220 args = [
176 'python2.7', hg_bin,
221 'python2.7',
177 '--config', 'ui.ssh=ssh -F %s' % ssh_config,
222 hg_bin,
178 '--config', 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
223 '--config',
224 'ui.ssh=ssh -F %s' % ssh_config,
225 '--config',
226 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
179 # Also ensure .hgtags changes are present so auto version
227 # Also ensure .hgtags changes are present so auto version
180 # calculation works.
228 # calculation works.
181 'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
229 'push',
230 '-f',
231 '-r',
232 full_revision,
233 '-r',
234 'file(.hgtags)',
182 'ssh://%s/c:/hgdev/src' % public_ip,
235 'ssh://%s/c:/hgdev/src' % public_ip,
183 ]
236 ]
184
237
@@ -188,8 +241,9 b' def synchronize_hg(hg_repo: pathlib.Path'
188 if res.returncode not in (0, 1):
241 if res.returncode not in (0, 1):
189 res.check_returncode()
242 res.check_returncode()
190
243
191 run_powershell(winrm_client,
244 run_powershell(
192 HG_UPDATE_CLEAN.format(revision=full_revision))
245 winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
246 )
193
247
194 # TODO detect dirty local working directory and synchronize accordingly.
248 # TODO detect dirty local working directory and synchronize accordingly.
195
249
@@ -225,8 +279,9 b' def copy_latest_dist(winrm_client, patte'
225 winrm_client.fetch(source, str(dest))
279 winrm_client.fetch(source, str(dest))
226
280
227
281
228 def build_inno_installer(winrm_client, arch: str, dest_path: pathlib.Path,
282 def build_inno_installer(
229 version=None):
283 winrm_client, arch: str, dest_path: pathlib.Path, version=None
284 ):
230 """Build the Inno Setup installer on a remote machine.
285 """Build the Inno Setup installer on a remote machine.
231
286
232 Using a WinRM client, remote commands are executed to build
287 Using a WinRM client, remote commands are executed to build
@@ -238,8 +293,9 b' def build_inno_installer(winrm_client, a'
238 if version:
293 if version:
239 extra_args.extend(['--version', version])
294 extra_args.extend(['--version', version])
240
295
241 ps = get_vc_prefix(arch) + BUILD_INNO.format(arch=arch,
296 ps = get_vc_prefix(arch) + BUILD_INNO.format(
242 extra_args=' '.join(extra_args))
297 arch=arch, extra_args=' '.join(extra_args)
298 )
243 run_powershell(winrm_client, ps)
299 run_powershell(winrm_client, ps)
244 copy_latest_dist(winrm_client, '*.exe', dest_path)
300 copy_latest_dist(winrm_client, '*.exe', dest_path)
245
301
@@ -256,8 +312,9 b' def build_wheel(winrm_client, arch: str,'
256 copy_latest_dist(winrm_client, '*.whl', dest_path)
312 copy_latest_dist(winrm_client, '*.whl', dest_path)
257
313
258
314
259 def build_wix_installer(winrm_client, arch: str, dest_path: pathlib.Path,
315 def build_wix_installer(
260 version=None):
316 winrm_client, arch: str, dest_path: pathlib.Path, version=None
317 ):
261 """Build the WiX installer on a remote machine.
318 """Build the WiX installer on a remote machine.
262
319
263 Using a WinRM client, remote commands are executed to build a WiX installer.
320 Using a WinRM client, remote commands are executed to build a WiX installer.
@@ -267,8 +324,9 b' def build_wix_installer(winrm_client, ar'
267 if version:
324 if version:
268 extra_args.extend(['--version', version])
325 extra_args.extend(['--version', version])
269
326
270 ps = get_vc_prefix(arch) + BUILD_WIX.format(arch=arch,
327 ps = get_vc_prefix(arch) + BUILD_WIX.format(
271 extra_args=' '.join(extra_args))
328 arch=arch, extra_args=' '.join(extra_args)
329 )
272 run_powershell(winrm_client, ps)
330 run_powershell(winrm_client, ps)
273 copy_latest_dist(winrm_client, '*.msi', dest_path)
331 copy_latest_dist(winrm_client, '*.msi', dest_path)
274
332
@@ -282,17 +340,171 b' def run_tests(winrm_client, python_versi'
282 ``run-tests.py``.
340 ``run-tests.py``.
283 """
341 """
284 if not re.match(r'\d\.\d', python_version):
342 if not re.match(r'\d\.\d', python_version):
285 raise ValueError(r'python_version must be \d.\d; got %s' %
343 raise ValueError(
286 python_version)
344 r'python_version must be \d.\d; got %s' % python_version
345 )
287
346
288 if arch not in ('x86', 'x64'):
347 if arch not in ('x86', 'x64'):
289 raise ValueError('arch must be x86 or x64; got %s' % arch)
348 raise ValueError('arch must be x86 or x64; got %s' % arch)
290
349
291 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
350 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
292
351
293 ps = RUN_TESTS.format(
352 ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
294 python_path=python_path,
353
295 test_flags=test_flags or '',
354 run_powershell(winrm_client, ps)
355
356
357 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
358 return (
359 dist_path / X86_WHEEL_FILENAME.format(version=version),
360 dist_path / X64_WHEEL_FILENAME.format(version=version),
361 )
362
363
364 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
365 return (
366 dist_path / X86_WHEEL_FILENAME.format(version=version),
367 dist_path / X64_WHEEL_FILENAME.format(version=version),
368 dist_path / X86_EXE_FILENAME.format(version=version),
369 dist_path / X64_EXE_FILENAME.format(version=version),
370 dist_path / X86_MSI_FILENAME.format(version=version),
371 dist_path / X64_MSI_FILENAME.format(version=version),
372 )
373
374
375 def generate_latest_dat(version: str):
376 x86_exe_filename = X86_EXE_FILENAME.format(version=version)
377 x64_exe_filename = X64_EXE_FILENAME.format(version=version)
378 x86_msi_filename = X86_MSI_FILENAME.format(version=version)
379 x64_msi_filename = X64_MSI_FILENAME.format(version=version)
380
381 entries = (
382 (
383 '10',
384 version,
385 X86_USER_AGENT_PATTERN,
386 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x86_exe_filename),
387 X86_EXE_DESCRIPTION.format(version=version),
388 ),
389 (
390 '10',
391 version,
392 X64_USER_AGENT_PATTERN,
393 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_exe_filename),
394 X64_EXE_DESCRIPTION.format(version=version),
395 ),
396 (
397 '10',
398 version,
399 X86_USER_AGENT_PATTERN,
400 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x86_msi_filename),
401 X86_MSI_DESCRIPTION.format(version=version),
402 ),
403 (
404 '10',
405 version,
406 X64_USER_AGENT_PATTERN,
407 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_msi_filename),
408 X64_MSI_DESCRIPTION.format(version=version),
409 ),
296 )
410 )
297
411
298 run_powershell(winrm_client, ps)
412 lines = ['\t'.join(e) for e in entries]
413
414 return '\n'.join(lines) + '\n'
415
416
417 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
418 """Publish Windows release artifacts to PyPI."""
419
420 wheel_paths = resolve_wheel_artifacts(dist_path, version)
421
422 for p in wheel_paths:
423 if not p.exists():
424 raise Exception('%s not found' % p)
425
426 print('uploading wheels to PyPI (you may be prompted for credentials)')
427 pypi_upload(wheel_paths)
428
429
430 def publish_artifacts_mercurial_scm_org(
431 dist_path: pathlib.Path, version: str, ssh_username=None
432 ):
433 """Publish Windows release artifacts to mercurial-scm.org."""
434 all_paths = resolve_all_artifacts(dist_path, version)
435
436 for p in all_paths:
437 if not p.exists():
438 raise Exception('%s not found' % p)
439
440 client = paramiko.SSHClient()
441 client.load_system_host_keys()
442 # We assume the system SSH configuration knows how to connect.
443 print('connecting to mercurial-scm.org via ssh...')
444 try:
445 client.connect('mercurial-scm.org', username=ssh_username)
446 except paramiko.AuthenticationException:
447 print('error authenticating; is an SSH key available in an SSH agent?')
448 raise
449
450 print('SSH connection established')
451
452 print('opening SFTP client...')
453 sftp = client.open_sftp()
454 print('SFTP client obtained')
455
456 for p in all_paths:
457 dest_path = '/var/www/release/windows/%s' % p.name
458 print('uploading %s to %s' % (p, dest_path))
459
460 with p.open('rb') as fh:
461 data = fh.read()
462
463 with sftp.open(dest_path, 'wb') as fh:
464 fh.write(data)
465 fh.chmod(0o0664)
466
467 latest_dat_path = '/var/www/release/windows/latest.dat'
468
469 now = datetime.datetime.utcnow()
470 backup_path = dist_path / (
471 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
472 )
473 print('backing up %s to %s' % (latest_dat_path, backup_path))
474
475 with sftp.open(latest_dat_path, 'rb') as fh:
476 latest_dat_old = fh.read()
477
478 with backup_path.open('wb') as fh:
479 fh.write(latest_dat_old)
480
481 print('writing %s with content:' % latest_dat_path)
482 latest_dat_content = generate_latest_dat(version)
483 print(latest_dat_content)
484
485 with sftp.open(latest_dat_path, 'wb') as fh:
486 fh.write(latest_dat_content.encode('ascii'))
487
488
489 def publish_artifacts(
490 dist_path: pathlib.Path,
491 version: str,
492 pypi=True,
493 mercurial_scm_org=True,
494 ssh_username=None,
495 ):
496 """Publish Windows release artifacts.
497
498 Files are found in `dist_path`. We will look for files with version string
499 `version`.
500
501 `pypi` controls whether we upload to PyPI.
502 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
503 """
504 if pypi:
505 publish_artifacts_pypi(dist_path, version)
506
507 if mercurial_scm_org:
508 publish_artifacts_mercurial_scm_org(
509 dist_path, version, ssh_username=ssh_username
510 )
@@ -11,9 +11,7 b' import logging'
11 import pprint
11 import pprint
12 import time
12 import time
13
13
14 from pypsrp.client import (
14 from pypsrp.client import Client
15 Client,
16 )
17 from pypsrp.powershell import (
15 from pypsrp.powershell import (
18 PowerShell,
16 PowerShell,
19 PSInvocationState,
17 PSInvocationState,
@@ -35,8 +33,13 b' def wait_for_winrm(host, username, passw'
35
33
36 while True:
34 while True:
37 try:
35 try:
38 client = Client(host, username=username, password=password,
36 client = Client(
39 ssl=ssl, connection_timeout=5)
37 host,
38 username=username,
39 password=password,
40 ssl=ssl,
41 connection_timeout=5,
42 )
40 client.execute_ps("Write-Host 'Hello, World!'")
43 client.execute_ps("Write-Host 'Hello, World!'")
41 return client
44 return client
42 except requests.exceptions.ConnectionError:
45 except requests.exceptions.ConnectionError:
@@ -52,7 +55,7 b' def format_object(o):'
52
55
53 try:
56 try:
54 o = str(o)
57 o = str(o)
55 except TypeError:
58 except (AttributeError, TypeError):
56 o = pprint.pformat(o.extended_properties)
59 o = pprint.pformat(o.extended_properties)
57
60
58 return o
61 return o
@@ -78,5 +81,7 b' def run_powershell(client, script):'
78 print(format_object(o))
81 print(format_object(o))
79
82
80 if ps.state == PSInvocationState.FAILED:
83 if ps.state == PSInvocationState.FAILED:
81 raise Exception('PowerShell execution failed: %s' %
84 raise Exception(
82 ' '.join(map(format_object, ps.streams.error)))
85 'PowerShell execution failed: %s'
86 % ' '.join(map(format_object, ps.streams.error))
87 )
@@ -2,7 +2,7 b''
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
6 #
6 #
7 astroid==1.6.6 \
7 astroid==1.6.6 \
8 --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
8 --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
@@ -22,10 +22,10 b' contextlib2==0.5.5 \\'
22 --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \
22 --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \
23 --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \
23 --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \
24 # via vcrpy
24 # via vcrpy
25 docutils==0.14 \
25 docutils==0.15.2 \
26 --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
26 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
27 --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
27 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
28 --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
28 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
29 enum34==1.1.6 \
29 enum34==1.1.6 \
30 --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
30 --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
31 --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
31 --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
@@ -36,83 +36,70 b' funcsigs==1.0.2 \\'
36 --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
36 --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
37 --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
37 --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
38 # via mock
38 # via mock
39 futures==3.2.0 \
39 futures==3.3.0 \
40 --hash=sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265 \
40 --hash=sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16 \
41 --hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1 \
41 --hash=sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794 \
42 # via isort
42 # via isort
43 fuzzywuzzy==0.17.0 \
43 fuzzywuzzy==0.17.0 \
44 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
44 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
45 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
45 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
46 isort==4.3.17 \
46 isort==4.3.21 \
47 --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
47 --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \
48 --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
48 --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \
49 # via pylint
49 # via pylint
50 lazy-object-proxy==1.3.1 \
50 lazy-object-proxy==1.4.1 \
51 --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
51 --hash=sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661 \
52 --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
52 --hash=sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f \
53 --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
53 --hash=sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13 \
54 --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
54 --hash=sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821 \
55 --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
55 --hash=sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71 \
56 --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
56 --hash=sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e \
57 --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
57 --hash=sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea \
58 --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
58 --hash=sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229 \
59 --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
59 --hash=sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4 \
60 --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
60 --hash=sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e \
61 --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
61 --hash=sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20 \
62 --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
62 --hash=sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16 \
63 --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
63 --hash=sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b \
64 --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
64 --hash=sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7 \
65 --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
65 --hash=sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c \
66 --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
66 --hash=sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a \
67 --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
67 --hash=sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e \
68 --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
68 --hash=sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1 \
69 --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
70 --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
71 --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
72 --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
73 --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
74 --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
75 --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
76 --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
77 --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
78 --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
79 --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
80 # via astroid
69 # via astroid
81 mccabe==0.6.1 \
70 mccabe==0.6.1 \
82 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
71 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
83 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
72 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
84 # via pylint
73 # via pylint
85 mock==2.0.0 \
74 mock==3.0.5 \
86 --hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \
75 --hash=sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3 \
87 --hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba \
76 --hash=sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8 \
88 # via vcrpy
77 # via vcrpy
89 pbr==5.1.3 \
90 --hash=sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843 \
91 --hash=sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824 \
92 # via mock
93 pyflakes==2.1.1 \
78 pyflakes==2.1.1 \
94 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
79 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
95 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
80 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
96 pygments==2.3.1 \
81 pygments==2.4.2 \
97 --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
82 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
98 --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
83 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
99 pylint==1.9.4 \
84 pylint==1.9.5 \
100 --hash=sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d \
85 --hash=sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42 \
101 --hash=sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93
86 --hash=sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300
102 python-levenshtein==0.12.0 \
87 python-levenshtein==0.12.0 \
103 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
88 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
104 pyyaml==5.1 \
89 pyyaml==5.1.2 \
105 --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
90 --hash=sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9 \
106 --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
91 --hash=sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4 \
107 --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
92 --hash=sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8 \
108 --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
93 --hash=sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696 \
109 --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
94 --hash=sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34 \
110 --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
95 --hash=sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9 \
111 --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
96 --hash=sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73 \
112 --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
97 --hash=sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299 \
113 --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
98 --hash=sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b \
114 --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
99 --hash=sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae \
115 --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
100 --hash=sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681 \
101 --hash=sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41 \
102 --hash=sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8 \
116 # via vcrpy
103 # via vcrpy
117 singledispatch==3.4.0.3 \
104 singledispatch==3.4.0.3 \
118 --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
105 --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
@@ -125,6 +112,10 b' six==1.12.0 \\'
125 vcrpy==2.0.1 \
112 vcrpy==2.0.1 \
126 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
113 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
127 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
114 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
128 wrapt==1.11.1 \
115 wrapt==1.11.2 \
129 --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
116 --hash=sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1 \
130 # via astroid, vcrpy
117 # via astroid, vcrpy
118
119 # WARNING: The following packages were not pinned, but pip requires them to be
120 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
121 # setuptools==41.0.1 # via python-levenshtein
@@ -2,16 +2,16 b''
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile -U --generate-hashes --output-file contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in
6 #
6 #
7 astroid==2.2.5 \
7 astroid==2.2.5 \
8 --hash=sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4 \
8 --hash=sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4 \
9 --hash=sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4 \
9 --hash=sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4 \
10 # via pylint
10 # via pylint
11 docutils==0.14 \
11 docutils==0.15.2 \
12 --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
12 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
13 --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
13 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
14 --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6
14 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
15 fuzzywuzzy==0.17.0 \
15 fuzzywuzzy==0.17.0 \
16 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
16 --hash=sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254 \
17 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
17 --hash=sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62
@@ -19,40 +19,29 b' idna==2.8 \\'
19 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
19 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
20 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
20 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
21 # via yarl
21 # via yarl
22 isort==4.3.17 \
22 isort==4.3.21 \
23 --hash=sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43 \
23 --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \
24 --hash=sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a \
24 --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \
25 # via pylint
25 # via pylint
26 lazy-object-proxy==1.3.1 \
26 lazy-object-proxy==1.4.1 \
27 --hash=sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33 \
27 --hash=sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661 \
28 --hash=sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39 \
28 --hash=sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f \
29 --hash=sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019 \
29 --hash=sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13 \
30 --hash=sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088 \
30 --hash=sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821 \
31 --hash=sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b \
31 --hash=sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71 \
32 --hash=sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e \
32 --hash=sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e \
33 --hash=sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6 \
33 --hash=sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea \
34 --hash=sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b \
34 --hash=sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229 \
35 --hash=sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5 \
35 --hash=sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4 \
36 --hash=sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff \
36 --hash=sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e \
37 --hash=sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd \
37 --hash=sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20 \
38 --hash=sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7 \
38 --hash=sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16 \
39 --hash=sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff \
39 --hash=sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b \
40 --hash=sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d \
40 --hash=sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7 \
41 --hash=sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2 \
41 --hash=sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c \
42 --hash=sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35 \
42 --hash=sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a \
43 --hash=sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4 \
43 --hash=sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e \
44 --hash=sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514 \
44 --hash=sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1 \
45 --hash=sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252 \
46 --hash=sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109 \
47 --hash=sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f \
48 --hash=sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c \
49 --hash=sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92 \
50 --hash=sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577 \
51 --hash=sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d \
52 --hash=sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d \
53 --hash=sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f \
54 --hash=sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a \
55 --hash=sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b \
56 # via astroid
45 # via astroid
57 mccabe==0.6.1 \
46 mccabe==0.6.1 \
58 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
47 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
@@ -92,57 +81,54 b' multidict==4.5.2 \\'
92 pyflakes==2.1.1 \
81 pyflakes==2.1.1 \
93 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
82 --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \
94 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
83 --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2
95 pygments==2.3.1 \
84 pygments==2.4.2 \
96 --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
85 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
97 --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
86 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
98 pylint==2.3.1 \
87 pylint==2.3.1 \
99 --hash=sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09 \
88 --hash=sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09 \
100 --hash=sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1
89 --hash=sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1
101 python-levenshtein==0.12.0 \
90 python-levenshtein==0.12.0 \
102 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
91 --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1
103 pyyaml==5.1 \
92 pyyaml==5.1.2 \
104 --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \
93 --hash=sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9 \
105 --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \
94 --hash=sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4 \
106 --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \
95 --hash=sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8 \
107 --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \
96 --hash=sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696 \
108 --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \
97 --hash=sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34 \
109 --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \
98 --hash=sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9 \
110 --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \
99 --hash=sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73 \
111 --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \
100 --hash=sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299 \
112 --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \
101 --hash=sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b \
113 --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \
102 --hash=sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae \
114 --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 \
103 --hash=sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681 \
104 --hash=sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41 \
105 --hash=sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8 \
115 # via vcrpy
106 # via vcrpy
116 six==1.12.0 \
107 six==1.12.0 \
117 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
108 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
118 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
109 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
119 # via astroid, vcrpy
110 # via astroid, vcrpy
120 typed-ast==1.3.4 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
111 typed-ast==1.4.0 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
121 --hash=sha256:04894d268ba6eab7e093d43107869ad49e7b5ef40d1a94243ea49b352061b200 \
112 --hash=sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e \
122 --hash=sha256:16616ece19daddc586e499a3d2f560302c11f122b9c692bc216e821ae32aa0d0 \
113 --hash=sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e \
123 --hash=sha256:252fdae740964b2d3cdfb3f84dcb4d6247a48a6abe2579e8029ab3be3cdc026c \
114 --hash=sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0 \
124 --hash=sha256:2af80a373af123d0b9f44941a46df67ef0ff7a60f95872412a145f4500a7fc99 \
115 --hash=sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c \
125 --hash=sha256:2c88d0a913229a06282b285f42a31e063c3bf9071ff65c5ea4c12acb6977c6a7 \
116 --hash=sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631 \
126 --hash=sha256:2ea99c029ebd4b5a308d915cc7fb95b8e1201d60b065450d5d26deb65d3f2bc1 \
117 --hash=sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4 \
127 --hash=sha256:3d2e3ab175fc097d2a51c7a0d3fda442f35ebcc93bb1d7bd9b95ad893e44c04d \
118 --hash=sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34 \
128 --hash=sha256:4766dd695548a15ee766927bf883fb90c6ac8321be5a60c141f18628fb7f8da8 \
119 --hash=sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b \
129 --hash=sha256:56b6978798502ef66625a2e0f80cf923da64e328da8bbe16c1ff928c70c873de \
120 --hash=sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a \
130 --hash=sha256:5cddb6f8bce14325b2863f9d5ac5c51e07b71b462361fd815d1d7706d3a9d682 \
121 --hash=sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233 \
131 --hash=sha256:644ee788222d81555af543b70a1098f2025db38eaa99226f3a75a6854924d4db \
122 --hash=sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1 \
132 --hash=sha256:64cf762049fc4775efe6b27161467e76d0ba145862802a65eefc8879086fc6f8 \
123 --hash=sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36 \
133 --hash=sha256:68c362848d9fb71d3c3e5f43c09974a0ae319144634e7a47db62f0f2a54a7fa7 \
124 --hash=sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d \
134 --hash=sha256:6c1f3c6f6635e611d58e467bf4371883568f0de9ccc4606f17048142dec14a1f \
125 --hash=sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a \
135 --hash=sha256:b213d4a02eec4ddf622f4d2fbc539f062af3788d1f332f028a2e19c42da53f15 \
126 --hash=sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12
136 --hash=sha256:bb27d4e7805a7de0e35bd0cb1411bc85f807968b2b0539597a49a23b00a622ae \
137 --hash=sha256:c9d414512eaa417aadae7758bc118868cd2396b0e6138c1dd4fda96679c079d3 \
138 --hash=sha256:f0937165d1e25477b01081c4763d2d9cdc3b18af69cb259dd4f640c9b900fe5e \
139 --hash=sha256:fb96a6e2c11059ecf84e6741a319f93f683e440e341d4489c9b161eca251cf2a \
140 --hash=sha256:fc71d2d6ae56a091a8d94f33ec9d0f2001d1cb1db423d8b4355debfe9ce689b7
141 vcrpy==2.0.1 \
127 vcrpy==2.0.1 \
142 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
128 --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \
143 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
129 --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f
144 wrapt==1.11.1 \
130 wrapt==1.11.2 \
145 --hash=sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533 \
131 --hash=sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1 \
146 # via astroid, vcrpy
132 # via astroid, vcrpy
147 yarl==1.3.0 \
133 yarl==1.3.0 \
148 --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
134 --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
@@ -157,3 +143,7 b' yarl==1.3.0 \\'
157 --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
143 --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
158 --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 \
144 --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 \
159 # via vcrpy
145 # via vcrpy
146
147 # WARNING: The following packages were not pinned, but pip requires them to be
148 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
149 # setuptools==41.0.1 # via python-levenshtein
@@ -2,43 +2,44 b''
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile -U --generate-hashes --output-file contrib/automation/requirements.txt contrib/automation/requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/automation/requirements.txt contrib/automation/requirements.txt.in
6 #
6 #
7 asn1crypto==0.24.0 \
7 asn1crypto==1.0.1 \
8 --hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
8 --hash=sha256:0b199f211ae690df3db4fd6c1c4ff976497fb1da689193e368eedbadc53d9292 \
9 --hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \
9 --hash=sha256:bca90060bd995c3f62c4433168eab407e44bdbdb567b3f3a396a676c1a4c4a3f \
10 # via cryptography
10 # via cryptography
11 bcrypt==3.1.6 \
11 bcrypt==3.1.7 \
12 --hash=sha256:0ba875eb67b011add6d8c5b76afbd92166e98b1f1efab9433d5dc0fafc76e203 \
12 --hash=sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89 \
13 --hash=sha256:21ed446054c93e209434148ef0b362432bb82bbdaf7beef70a32c221f3e33d1c \
13 --hash=sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42 \
14 --hash=sha256:28a0459381a8021f57230954b9e9a65bb5e3d569d2c253c5cac6cb181d71cf23 \
14 --hash=sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294 \
15 --hash=sha256:2aed3091eb6f51c26b7c2fad08d6620d1c35839e7a362f706015b41bd991125e \
15 --hash=sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161 \
16 --hash=sha256:2fa5d1e438958ea90eaedbf8082c2ceb1a684b4f6c75a3800c6ec1e18ebef96f \
16 --hash=sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31 \
17 --hash=sha256:3a73f45484e9874252002793518da060fb11eaa76c30713faa12115db17d1430 \
17 --hash=sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5 \
18 --hash=sha256:3e489787638a36bb466cd66780e15715494b6d6905ffdbaede94440d6d8e7dba \
18 --hash=sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c \
19 --hash=sha256:44636759d222baa62806bbceb20e96f75a015a6381690d1bc2eda91c01ec02ea \
19 --hash=sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0 \
20 --hash=sha256:678c21b2fecaa72a1eded0cf12351b153615520637efcadc09ecf81b871f1596 \
20 --hash=sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de \
21 --hash=sha256:75460c2c3786977ea9768d6c9d8957ba31b5fbeb0aae67a5c0e96aab4155f18c \
21 --hash=sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e \
22 --hash=sha256:8ac06fb3e6aacb0a95b56eba735c0b64df49651c6ceb1ad1cf01ba75070d567f \
22 --hash=sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052 \
23 --hash=sha256:8fdced50a8b646fff8fa0e4b1c5fd940ecc844b43d1da5a980cb07f2d1b1132f \
23 --hash=sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09 \
24 --hash=sha256:9b2c5b640a2da533b0ab5f148d87fb9989bf9bcb2e61eea6a729102a6d36aef9 \
24 --hash=sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105 \
25 --hash=sha256:a9083e7fa9adb1a4de5ac15f9097eb15b04e2c8f97618f1b881af40abce382e1 \
25 --hash=sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133 \
26 --hash=sha256:b7e3948b8b1a81c5a99d41da5fb2dc03ddb93b5f96fcd3fd27e643f91efa33e1 \
26 --hash=sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7 \
27 --hash=sha256:b998b8ca979d906085f6a5d84f7b5459e5e94a13fc27c28a3514437013b6c2f6 \
27 --hash=sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc \
28 --hash=sha256:dd08c50bc6f7be69cd7ba0769acca28c846ec46b7a8ddc2acf4b9ac6f8a7457e \
29 --hash=sha256:de5badee458544ab8125e63e39afeedfcf3aef6a6e2282ac159c95ae7472d773 \
30 --hash=sha256:ede2a87333d24f55a4a7338a6ccdccf3eaa9bed081d1737e0db4dbd1a4f7e6b6 \
31 # via paramiko
28 # via paramiko
32 boto3==1.9.137 \
29 bleach==3.1.0 \
33 --hash=sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324 \
30 --hash=sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16 \
34 --hash=sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8
31 --hash=sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa \
35 botocore==1.12.137 \
32 # via readme-renderer
36 --hash=sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8 \
33 boto3==1.9.243 \
37 --hash=sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35 \
34 --hash=sha256:404acbecef8f4912f18312fcfaffe7eba7f10b3b7adf7853bdba59cdf2275ebb \
35 --hash=sha256:c6e5a7e4548ce7586c354ff633f2a66ba3c471d15a8ae6a30f873122ab04e1cf
36 botocore==1.12.243 \
37 --hash=sha256:397585a7881230274afb8d1877ef69a661b0a311745cd324f14a052fb2a2863a \
38 --hash=sha256:4496f8da89cb496462a831897ad248e13e431d9fa7e41e06d426fd6658ab6e59 \
38 # via boto3, s3transfer
39 # via boto3, s3transfer
39 certifi==2019.3.9 \
40 certifi==2019.9.11 \
40 --hash=sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5 \
41 --hash=sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50 \
41 --hash=sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae \
42 --hash=sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef \
42 # via requests
43 # via requests
43 cffi==1.12.3 \
44 cffi==1.12.3 \
44 --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
45 --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
@@ -74,32 +75,29 b' chardet==3.0.4 \\'
74 --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
75 --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
75 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
76 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
76 # via requests
77 # via requests
77 cryptography==2.6.1 \
78 cryptography==2.7 \
78 --hash=sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1 \
79 --hash=sha256:24b61e5fcb506424d3ec4e18bca995833839bf13c59fc43e530e488f28d46b8c \
79 --hash=sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705 \
80 --hash=sha256:25dd1581a183e9e7a806fe0543f485103232f940fcfc301db65e630512cce643 \
80 --hash=sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6 \
81 --hash=sha256:3452bba7c21c69f2df772762be0066c7ed5dc65df494a1d53a58b683a83e1216 \
81 --hash=sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1 \
82 --hash=sha256:41a0be220dd1ed9e998f5891948306eb8c812b512dc398e5a01846d855050799 \
82 --hash=sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8 \
83 --hash=sha256:5751d8a11b956fbfa314f6553d186b94aa70fdb03d8a4d4f1c82dcacf0cbe28a \
83 --hash=sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151 \
84 --hash=sha256:5f61c7d749048fa6e3322258b4263463bfccefecb0dd731b6561cb617a1d9bb9 \
84 --hash=sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d \
85 --hash=sha256:72e24c521fa2106f19623a3851e9f89ddfdeb9ac63871c7643790f872a305dfc \
85 --hash=sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659 \
86 --hash=sha256:7b97ae6ef5cba2e3bb14256625423413d5ce8d1abb91d4f29b6d1a081da765f8 \
86 --hash=sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537 \
87 --hash=sha256:961e886d8a3590fd2c723cf07be14e2a91cf53c25f02435c04d39e90780e3b53 \
87 --hash=sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e \
88 --hash=sha256:96d8473848e984184b6728e2c9d391482008646276c3ff084a1bd89e15ff53a1 \
88 --hash=sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb \
89 --hash=sha256:ae536da50c7ad1e002c3eee101871d93abdc90d9c5f651818450a0d3af718609 \
89 --hash=sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c \
90 --hash=sha256:b0db0cecf396033abb4a93c95d1602f268b3a68bb0a9cc06a7cff587bb9a7292 \
90 --hash=sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9 \
91 --hash=sha256:cfee9164954c186b191b91d4193989ca994703b2fff406f71cf454a2d3c7327e \
91 --hash=sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5 \
92 --hash=sha256:e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6 \
92 --hash=sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad \
93 --hash=sha256:f27d93f0139a3c056172ebb5d4f9056e770fdf0206c2f422ff2ebbad142e09ed \
93 --hash=sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a \
94 --hash=sha256:f57b76e46a58b63d1c6375017f4564a28f19a5ca912691fd2e4261b3414b618d \
94 --hash=sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460 \
95 --hash=sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd \
96 --hash=sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6 \
97 # via paramiko, pypsrp
95 # via paramiko, pypsrp
98 docutils==0.14 \
96 docutils==0.15.2 \
99 --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
97 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
100 --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
98 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
101 --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \
99 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 \
102 # via botocore
100 # via botocore, readme-renderer
103 idna==2.8 \
101 idna==2.8 \
104 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
102 --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
105 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
103 --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
@@ -108,20 +106,24 b' jmespath==0.9.4 \\'
108 --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
106 --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
109 --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
107 --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
110 # via boto3, botocore
108 # via boto3, botocore
111 ntlm-auth==1.3.0 \
109 ntlm-auth==1.4.0 \
112 --hash=sha256:bb2fd03c665f0f62c5f65695b62dcdb07fb7a45df6ebc86c770be2054d6902dd \
110 --hash=sha256:11f7a3cec38155b7cecdd9bbc8c37cd738d8012f0523b3f98d8caefe394feb97 \
113 --hash=sha256:ce5b4483ed761f341a538a426a71a52e5a9cf5fd834ebef1d2090f9eef14b3f8 \
111 --hash=sha256:350f2389c8ee5517f47db55a36ac2f8efc9742a60a678d6e2caa92385bdcaa9a \
114 # via pypsrp
112 # via pypsrp
115 paramiko==2.4.2 \
113 paramiko==2.6.0 \
116 --hash=sha256:3c16b2bfb4c0d810b24c40155dbfd113c0521e7e6ee593d704e84b4c658a1f3b \
114 --hash=sha256:99f0179bdc176281d21961a003ffdb2ec369daac1a1007241f53374e376576cf \
117 --hash=sha256:a8975a7df3560c9f1e2b43dc54ebd40fd00a7017392ca5445ce7df409f900fcb
115 --hash=sha256:f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041
118 pyasn1==0.4.5 \
116 pkginfo==1.5.0.1 \
119 --hash=sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7 \
117 --hash=sha256:7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb \
120 --hash=sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e \
118 --hash=sha256:a6d9e40ca61ad3ebd0b72fbadd4fba16e4c0e4df0428c041e01e06eb6ee71f32 \
121 # via paramiko
119 # via twine
122 pycparser==2.19 \
120 pycparser==2.19 \
123 --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
121 --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
124 # via cffi
122 # via cffi
123 pygments==2.4.2 \
124 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
125 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297 \
126 # via readme-renderer
125 pynacl==1.3.0 \
127 pynacl==1.3.0 \
126 --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
128 --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
127 --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
129 --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
@@ -143,26 +145,49 b' pynacl==1.3.0 \\'
143 --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
145 --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
144 --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
146 --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
145 # via paramiko
147 # via paramiko
146 pypsrp==0.3.1 \
148 pypsrp==0.4.0 \
147 --hash=sha256:309853380fe086090a03cc6662a778ee69b1cae355ae4a932859034fd76e9d0b \
149 --hash=sha256:64b5bdd725a9744c821483b05ecd266f6417f4c6e90ee961a08838480f7d025e \
148 --hash=sha256:90f946254f547dc3493cea8493c819ab87e152a755797c93aa2668678ba8ae85
150 --hash=sha256:f42919247fb80f7dc24c552560d7c24e754d15326030c9e3b7b94f51cfa4dc69
149 python-dateutil==2.8.0 \
151 python-dateutil==2.8.0 \
150 --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \
152 --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \
151 --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \
153 --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \
152 # via botocore
154 # via botocore
153 requests==2.21.0 \
155 readme-renderer==24.0 \
154 --hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
156 --hash=sha256:bb16f55b259f27f75f640acf5e00cf897845a8b3e4731b5c1a436e4b8529202f \
155 --hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b \
157 --hash=sha256:c8532b79afc0375a85f10433eca157d6b50f7d6990f337fa498c96cd4bfc203d \
156 # via pypsrp
158 # via twine
157 s3transfer==0.2.0 \
159 requests-toolbelt==0.9.1 \
158 --hash=sha256:7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e \
160 --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
159 --hash=sha256:f23d5cb7d862b104401d9021fc82e5fa0e0cf57b7660a1331425aab0c691d021 \
161 --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 \
162 # via twine
163 requests==2.22.0 \
164 --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
165 --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \
166 # via pypsrp, requests-toolbelt, twine
167 s3transfer==0.2.1 \
168 --hash=sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d \
169 --hash=sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba \
160 # via boto3
170 # via boto3
161 six==1.12.0 \
171 six==1.12.0 \
162 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
172 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
163 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
173 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
164 # via bcrypt, cryptography, pynacl, pypsrp, python-dateutil
174 # via bcrypt, bleach, cryptography, pynacl, pypsrp, python-dateutil, readme-renderer
165 urllib3==1.24.2 \
175 tqdm==4.36.1 \
166 --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
176 --hash=sha256:abc25d0ce2397d070ef07d8c7e706aede7920da163c64997585d42d3537ece3d \
167 --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
177 --hash=sha256:dd3fcca8488bb1d416aa7469d2f277902f26260c45aa86b667b074cd44b3b115 \
178 # via twine
179 twine==2.0.0 \
180 --hash=sha256:5319dd3e02ac73fcddcd94f035b9631589ab5d23e1f4699d57365199d85261e1 \
181 --hash=sha256:9fe7091715c7576df166df8ef6654e61bada39571783f2fd415bdcba867c6993
182 urllib3==1.25.6 \
183 --hash=sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398 \
184 --hash=sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86 \
168 # via botocore, requests
185 # via botocore, requests
186 webencodings==0.5.1 \
187 --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
188 --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 \
189 # via bleach
190
191 # WARNING: The following packages were not pinned, but pip requires them to be
192 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
193 # setuptools==41.2.0 # via twine
@@ -1,3 +1,4 b''
1 boto3
1 boto3
2 paramiko
2 paramiko
3 pypsrp
3 pypsrp
4 twine
@@ -6,17 +6,23 b' import sys'
6
6
7 from mercurial import (
7 from mercurial import (
8 mdiff,
8 mdiff,
9 pycompat,
9 )
10 )
10
11
12
11 def reducetest(a, b):
13 def reducetest(a, b):
12 tries = 0
14 tries = 0
13 reductions = 0
15 reductions = 0
14 print("reducing...")
16 print("reducing...")
15 while tries < 1000:
17 while tries < 1000:
16 a2 = "\n".join(l for l in a.splitlines()
18 a2 = (
17 if random.randint(0, 100) > 0) + "\n"
19 "\n".join(l for l in a.splitlines() if random.randint(0, 100) > 0)
18 b2 = "\n".join(l for l in b.splitlines()
20 + "\n"
19 if random.randint(0, 100) > 0) + "\n"
21 )
22 b2 = (
23 "\n".join(l for l in b.splitlines() if random.randint(0, 100) > 0)
24 + "\n"
25 )
20 if a2 == a and b2 == b:
26 if a2 == a and b2 == b:
21 continue
27 continue
22 if a2 == b2:
28 if a2 == b2:
@@ -31,8 +37,7 b' def reducetest(a, b):'
31 a = a2
37 a = a2
32 b = b2
38 b = b2
33
39
34 print("reduced:", reductions, len(a) + len(b),
40 print("reduced:", reductions, len(a) + len(b), repr(a), repr(b))
35 repr(a), repr(b))
36 try:
41 try:
37 test1(a, b)
42 test1(a, b)
38 except Exception as inst:
43 except Exception as inst:
@@ -40,6 +45,7 b' def reducetest(a, b):'
40
45
41 sys.exit(0)
46 sys.exit(0)
42
47
48
43 def test1(a, b):
49 def test1(a, b):
44 d = mdiff.textdiff(a, b)
50 d = mdiff.textdiff(a, b)
45 if not d:
51 if not d:
@@ -48,23 +54,25 b' def test1(a, b):'
48 if c != b:
54 if c != b:
49 raise ValueError("bad")
55 raise ValueError("bad")
50
56
57
51 def testwrap(a, b):
58 def testwrap(a, b):
52 try:
59 try:
53 test1(a, b)
60 test1(a, b)
54 return
61 return
55 except Exception as inst:
62 except Exception as inst:
56 pass
63 print("exception:", inst)
57 print("exception:", inst)
58 reducetest(a, b)
64 reducetest(a, b)
59
65
66
60 def test(a, b):
67 def test(a, b):
61 testwrap(a, b)
68 testwrap(a, b)
62 testwrap(b, a)
69 testwrap(b, a)
63
70
71
64 def rndtest(size, noise):
72 def rndtest(size, noise):
65 a = []
73 a = []
66 src = " aaaaaaaabbbbccd"
74 src = " aaaaaaaabbbbccd"
67 for x in xrange(size):
75 for x in pycompat.xrange(size):
68 a.append(src[random.randint(0, len(src) - 1)])
76 a.append(src[random.randint(0, len(src) - 1)])
69
77
70 while True:
78 while True:
@@ -82,6 +90,7 b' def rndtest(size, noise):'
82
90
83 test(a, b)
91 test(a, b)
84
92
93
85 maxvol = 10000
94 maxvol = 10000
86 startsize = 2
95 startsize = 2
87 while True:
96 while True:
@@ -44,15 +44,24 b' from mercurial import ('
44 util,
44 util,
45 )
45 )
46
46
47 basedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
47 basedir = os.path.abspath(
48 os.path.pardir, os.path.pardir))
48 os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
49 )
49 reposdir = os.environ['REPOS_DIR']
50 reposdir = os.environ['REPOS_DIR']
50 reposnames = [name for name in os.listdir(reposdir)
51 reposnames = [
51 if os.path.isdir(os.path.join(reposdir, name, ".hg"))]
52 name
53 for name in os.listdir(reposdir)
54 if os.path.isdir(os.path.join(reposdir, name, ".hg"))
55 ]
52 if not reposnames:
56 if not reposnames:
53 raise ValueError("No repositories found in $REPO_DIR")
57 raise ValueError("No repositories found in $REPO_DIR")
54 outputre = re.compile((r'! wall (\d+.\d+) comb \d+.\d+ user \d+.\d+ sys '
58 outputre = re.compile(
55 r'\d+.\d+ \(best of \d+\)'))
59 (
60 r'! wall (\d+.\d+) comb \d+.\d+ user \d+.\d+ sys '
61 r'\d+.\d+ \(best of \d+\)'
62 )
63 )
64
56
65
57 def runperfcommand(reponame, command, *args, **kwargs):
66 def runperfcommand(reponame, command, *args, **kwargs):
58 os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
67 os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
@@ -63,8 +72,9 b' def runperfcommand(reponame, command, *a'
63 else:
72 else:
64 ui = uimod.ui()
73 ui = uimod.ui()
65 repo = hg.repository(ui, os.path.join(reposdir, reponame))
74 repo = hg.repository(ui, os.path.join(reposdir, reponame))
66 perfext = extensions.load(ui, 'perfext',
75 perfext = extensions.load(
67 os.path.join(basedir, 'contrib', 'perf.py'))
76 ui, 'perfext', os.path.join(basedir, 'contrib', 'perf.py')
77 )
68 cmd = getattr(perfext, command)
78 cmd = getattr(perfext, command)
69 ui.pushbuffer()
79 ui.pushbuffer()
70 cmd(ui, repo, *args, **kwargs)
80 cmd(ui, repo, *args, **kwargs)
@@ -74,6 +84,7 b' def runperfcommand(reponame, command, *a'
74 raise ValueError("Invalid output {0}".format(output))
84 raise ValueError("Invalid output {0}".format(output))
75 return float(match.group(1))
85 return float(match.group(1))
76
86
87
77 def perfbench(repos=reposnames, name=None, params=None):
88 def perfbench(repos=reposnames, name=None, params=None):
78 """decorator to declare ASV benchmark based on contrib/perf.py extension
89 """decorator to declare ASV benchmark based on contrib/perf.py extension
79
90
@@ -104,10 +115,12 b' def perfbench(repos=reposnames, name=Non'
104 def wrapped(repo, *args):
115 def wrapped(repo, *args):
105 def perf(command, *a, **kw):
116 def perf(command, *a, **kw):
106 return runperfcommand(repo, command, *a, **kw)
117 return runperfcommand(repo, command, *a, **kw)
118
107 return func(perf, *args)
119 return func(perf, *args)
108
120
109 wrapped.params = [p[1] for p in params]
121 wrapped.params = [p[1] for p in params]
110 wrapped.param_names = [p[0] for p in params]
122 wrapped.param_names = [p[0] for p in params]
111 wrapped.pretty_name = name
123 wrapped.pretty_name = name
112 return wrapped
124 return wrapped
125
113 return decorator
126 return decorator
@@ -9,18 +9,22 b' from __future__ import absolute_import'
9
9
10 from . import perfbench
10 from . import perfbench
11
11
12
12 @perfbench()
13 @perfbench()
13 def track_tags(perf):
14 def track_tags(perf):
14 return perf("perftags")
15 return perf("perftags")
15
16
17
16 @perfbench()
18 @perfbench()
17 def track_status(perf):
19 def track_status(perf):
18 return perf("perfstatus", unknown=False)
20 return perf("perfstatus", unknown=False)
19
21
22
20 @perfbench(params=[('rev', ['1000', '10000', 'tip'])])
23 @perfbench(params=[('rev', ['1000', '10000', 'tip'])])
21 def track_manifest(perf, rev):
24 def track_manifest(perf, rev):
22 return perf("perfmanifest", rev)
25 return perf("perfmanifest", rev)
23
26
27
24 @perfbench()
28 @perfbench()
25 def track_heads(perf):
29 def track_heads(perf):
26 return perf("perfheads")
30 return perf("perfheads")
@@ -18,15 +18,16 b' import sys'
18
18
19 from . import basedir, perfbench
19 from . import basedir, perfbench
20
20
21
21 def createrevsetbenchmark(baseset, variants=None):
22 def createrevsetbenchmark(baseset, variants=None):
22 if variants is None:
23 if variants is None:
23 # Default variants
24 # Default variants
24 variants = ["plain", "first", "last", "sort", "sort+first",
25 variants = ["plain", "first", "last", "sort", "sort+first", "sort+last"]
25 "sort+last"]
26 fname = "track_" + "_".join(
26 fname = "track_" + "_".join("".join([
27 "".join(
27 c if c in string.digits + string.letters else " "
28 [c if c in string.digits + string.letters else " " for c in baseset]
28 for c in baseset
29 ).split()
29 ]).split())
30 )
30
31
31 def wrap(fname, baseset):
32 def wrap(fname, baseset):
32 @perfbench(name=baseset, params=[("variant", variants)])
33 @perfbench(name=baseset, params=[("variant", variants)])
@@ -36,18 +37,21 b' def createrevsetbenchmark(baseset, varia'
36 for var in variant.split("+"):
37 for var in variant.split("+"):
37 revset = "%s(%s)" % (var, revset)
38 revset = "%s(%s)" % (var, revset)
38 return perf("perfrevset", revset)
39 return perf("perfrevset", revset)
40
39 f.__name__ = fname
41 f.__name__ = fname
40 return f
42 return f
43
41 return wrap(fname, baseset)
44 return wrap(fname, baseset)
42
45
46
43 def initializerevsetbenchmarks():
47 def initializerevsetbenchmarks():
44 mod = sys.modules[__name__]
48 mod = sys.modules[__name__]
45 with open(os.path.join(basedir, 'contrib', 'base-revsets.txt'),
49 with open(os.path.join(basedir, 'contrib', 'base-revsets.txt'), 'rb') as fh:
46 'rb') as fh:
47 for line in fh:
50 for line in fh:
48 baseset = line.strip()
51 baseset = line.strip()
49 if baseset and not baseset.startswith('#'):
52 if baseset and not baseset.startswith('#'):
50 func = createrevsetbenchmark(baseset)
53 func = createrevsetbenchmark(baseset)
51 setattr(mod, func.__name__, func)
54 setattr(mod, func.__name__, func)
52
55
56
53 initializerevsetbenchmarks()
57 initializerevsetbenchmarks()
@@ -18,10 +18,13 b' import tempfile'
18 import token
18 import token
19 import tokenize
19 import tokenize
20
20
21
21 def adjusttokenpos(t, ofs):
22 def adjusttokenpos(t, ofs):
22 """Adjust start/end column of the given token"""
23 """Adjust start/end column of the given token"""
23 return t._replace(start=(t.start[0], t.start[1] + ofs),
24 return t._replace(
24 end=(t.end[0], t.end[1] + ofs))
25 start=(t.start[0], t.start[1] + ofs), end=(t.end[0], t.end[1] + ofs)
26 )
27
25
28
26 def replacetokens(tokens, opts):
29 def replacetokens(tokens, opts):
27 """Transform a stream of tokens from raw to Python 3.
30 """Transform a stream of tokens from raw to Python 3.
@@ -78,23 +81,68 b' def replacetokens(tokens, opts):'
78 already been done.
81 already been done.
79
82
80 """
83 """
81 st = tokens[j]
84 k = j
82 if st.type == token.STRING and st.string.startswith(("'", '"')):
85 currtoken = tokens[k]
83 sysstrtokens.add(st)
86 while currtoken.type in (token.STRING, token.NEWLINE, tokenize.NL):
87 k += 1
88 if currtoken.type == token.STRING and currtoken.string.startswith(
89 ("'", '"')
90 ):
91 sysstrtokens.add(currtoken)
92 try:
93 currtoken = tokens[k]
94 except IndexError:
95 break
96
97 def _isitemaccess(j):
98 """Assert the next tokens form an item access on `tokens[j]` and that
99 `tokens[j]` is a name.
100 """
101 try:
102 return (
103 tokens[j].type == token.NAME
104 and _isop(j + 1, '[')
105 and tokens[j + 2].type == token.STRING
106 and _isop(j + 3, ']')
107 )
108 except IndexError:
109 return False
110
111 def _ismethodcall(j, *methodnames):
112 """Assert the next tokens form a call to `methodname` with a string
113 as first argument on `tokens[j]` and that `tokens[j]` is a name.
114 """
115 try:
116 return (
117 tokens[j].type == token.NAME
118 and _isop(j + 1, '.')
119 and tokens[j + 2].type == token.NAME
120 and tokens[j + 2].string in methodnames
121 and _isop(j + 3, '(')
122 and tokens[j + 4].type == token.STRING
123 )
124 except IndexError:
125 return False
84
126
85 coldelta = 0 # column increment for new opening parens
127 coldelta = 0 # column increment for new opening parens
86 coloffset = -1 # column offset for the current line (-1: TBD)
128 coloffset = -1 # column offset for the current line (-1: TBD)
87 parens = [(0, 0, 0)] # stack of (line, end-column, column-offset)
129 parens = [(0, 0, 0, -1)] # stack of (line, end-column, column-offset, type)
130 ignorenextline = False # don't transform the next line
131 insideignoreblock = False # don't transform until turned off
88 for i, t in enumerate(tokens):
132 for i, t in enumerate(tokens):
89 # Compute the column offset for the current line, such that
133 # Compute the column offset for the current line, such that
90 # the current line will be aligned to the last opening paren
134 # the current line will be aligned to the last opening paren
91 # as before.
135 # as before.
92 if coloffset < 0:
136 if coloffset < 0:
93 if t.start[1] == parens[-1][1]:
137 lastparen = parens[-1]
94 coloffset = parens[-1][2]
138 if t.start[1] == lastparen[1]:
95 elif t.start[1] + 1 == parens[-1][1]:
139 coloffset = lastparen[2]
140 elif t.start[1] + 1 == lastparen[1] and lastparen[3] not in (
141 token.NEWLINE,
142 tokenize.NL,
143 ):
96 # fix misaligned indent of s/util.Abort/error.Abort/
144 # fix misaligned indent of s/util.Abort/error.Abort/
97 coloffset = parens[-1][2] + (parens[-1][1] - t.start[1])
145 coloffset = lastparen[2] + (lastparen[1] - t.start[1])
98 else:
146 else:
99 coloffset = 0
147 coloffset = 0
100
148
@@ -103,11 +151,26 b' def replacetokens(tokens, opts):'
103 yield adjusttokenpos(t, coloffset)
151 yield adjusttokenpos(t, coloffset)
104 coldelta = 0
152 coldelta = 0
105 coloffset = -1
153 coloffset = -1
154 if not insideignoreblock:
155 ignorenextline = (
156 tokens[i - 1].type == token.COMMENT
157 and tokens[i - 1].string == "# no-py3-transform"
158 )
159 continue
160
161 if t.type == token.COMMENT:
162 if t.string == "# py3-transform: off":
163 insideignoreblock = True
164 if t.string == "# py3-transform: on":
165 insideignoreblock = False
166
167 if ignorenextline or insideignoreblock:
168 yield adjusttokenpos(t, coloffset)
106 continue
169 continue
107
170
108 # Remember the last paren position.
171 # Remember the last paren position.
109 if _isop(i, '(', '[', '{'):
172 if _isop(i, '(', '[', '{'):
110 parens.append(t.end + (coloffset + coldelta,))
173 parens.append(t.end + (coloffset + coldelta, tokens[i + 1].type))
111 elif _isop(i, ')', ']', '}'):
174 elif _isop(i, ')', ']', '}'):
112 parens.pop()
175 parens.pop()
113
176
@@ -129,8 +192,10 b' def replacetokens(tokens, opts):'
129 # components touching docstrings need to handle unicode,
192 # components touching docstrings need to handle unicode,
130 # unfortunately.
193 # unfortunately.
131 if s[0:3] in ("'''", '"""'):
194 if s[0:3] in ("'''", '"""'):
132 yield adjusttokenpos(t, coloffset)
195 # If it's assigned to something, it's not a docstring
133 continue
196 if not _isop(i - 1, '='):
197 yield adjusttokenpos(t, coloffset)
198 continue
134
199
135 # If the first character isn't a quote, it is likely a string
200 # If the first character isn't a quote, it is likely a string
136 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
201 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
@@ -139,8 +204,7 b' def replacetokens(tokens, opts):'
139 continue
204 continue
140
205
141 # String literal. Prefix to make a b'' string.
206 # String literal. Prefix to make a b'' string.
142 yield adjusttokenpos(t._replace(string='b%s' % t.string),
207 yield adjusttokenpos(t._replace(string='b%s' % t.string), coloffset)
143 coloffset)
144 coldelta += 1
208 coldelta += 1
145 continue
209 continue
146
210
@@ -149,8 +213,15 b' def replacetokens(tokens, opts):'
149 fn = t.string
213 fn = t.string
150
214
151 # *attr() builtins don't accept byte strings to 2nd argument.
215 # *attr() builtins don't accept byte strings to 2nd argument.
152 if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
216 if fn in (
153 not _isop(i - 1, '.')):
217 'getattr',
218 'setattr',
219 'hasattr',
220 'safehasattr',
221 'wrapfunction',
222 'wrapclass',
223 'addattr',
224 ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
154 arg1idx = _findargnofcall(1)
225 arg1idx = _findargnofcall(1)
155 if arg1idx is not None:
226 if arg1idx is not None:
156 _ensuresysstr(arg1idx)
227 _ensuresysstr(arg1idx)
@@ -169,19 +240,30 b' def replacetokens(tokens, opts):'
169 yield adjusttokenpos(t._replace(string=fn[4:]), coloffset)
240 yield adjusttokenpos(t._replace(string=fn[4:]), coloffset)
170 continue
241 continue
171
242
243 if t.type == token.NAME and t.string in opts['treat-as-kwargs']:
244 if _isitemaccess(i):
245 _ensuresysstr(i + 2)
246 if _ismethodcall(i, 'get', 'pop', 'setdefault', 'popitem'):
247 _ensuresysstr(i + 4)
248
172 # Looks like "if __name__ == '__main__'".
249 # Looks like "if __name__ == '__main__'".
173 if (t.type == token.NAME and t.string == '__name__'
250 if (
174 and _isop(i + 1, '==')):
251 t.type == token.NAME
252 and t.string == '__name__'
253 and _isop(i + 1, '==')
254 ):
175 _ensuresysstr(i + 2)
255 _ensuresysstr(i + 2)
176
256
177 # Emit unmodified token.
257 # Emit unmodified token.
178 yield adjusttokenpos(t, coloffset)
258 yield adjusttokenpos(t, coloffset)
179
259
260
180 def process(fin, fout, opts):
261 def process(fin, fout, opts):
181 tokens = tokenize.tokenize(fin.readline)
262 tokens = tokenize.tokenize(fin.readline)
182 tokens = replacetokens(list(tokens), opts)
263 tokens = replacetokens(list(tokens), opts)
183 fout.write(tokenize.untokenize(tokens))
264 fout.write(tokenize.untokenize(tokens))
184
265
266
185 def tryunlink(fname):
267 def tryunlink(fname):
186 try:
268 try:
187 os.unlink(fname)
269 os.unlink(fname)
@@ -189,12 +271,14 b' def tryunlink(fname):'
189 if err.errno != errno.ENOENT:
271 if err.errno != errno.ENOENT:
190 raise
272 raise
191
273
274
192 @contextlib.contextmanager
275 @contextlib.contextmanager
193 def editinplace(fname):
276 def editinplace(fname):
194 n = os.path.basename(fname)
277 n = os.path.basename(fname)
195 d = os.path.dirname(fname)
278 d = os.path.dirname(fname)
196 fp = tempfile.NamedTemporaryFile(prefix='.%s-' % n, suffix='~', dir=d,
279 fp = tempfile.NamedTemporaryFile(
197 delete=False)
280 prefix='.%s-' % n, suffix='~', dir=d, delete=False
281 )
198 try:
282 try:
199 yield fp
283 yield fp
200 fp.close()
284 fp.close()
@@ -205,16 +289,43 b' def editinplace(fname):'
205 fp.close()
289 fp.close()
206 tryunlink(fp.name)
290 tryunlink(fp.name)
207
291
292
208 def main():
293 def main():
209 ap = argparse.ArgumentParser()
294 ap = argparse.ArgumentParser()
210 ap.add_argument('-i', '--inplace', action='store_true', default=False,
295 ap.add_argument(
211 help='edit files in place')
296 '--version', action='version', version='Byteify strings 1.0'
212 ap.add_argument('--dictiter', action='store_true', default=False,
297 )
213 help='rewrite iteritems() and itervalues()'),
298 ap.add_argument(
299 '-i',
300 '--inplace',
301 action='store_true',
302 default=False,
303 help='edit files in place',
304 )
305 ap.add_argument(
306 '--dictiter',
307 action='store_true',
308 default=False,
309 help='rewrite iteritems() and itervalues()',
310 ),
311 ap.add_argument(
312 '--allow-attr-methods',
313 action='store_true',
314 default=False,
315 help='also handle attr*() when they are methods',
316 ),
317 ap.add_argument(
318 '--treat-as-kwargs',
319 nargs="+",
320 default=[],
321 help="ignore kwargs-like objects",
322 ),
214 ap.add_argument('files', metavar='FILE', nargs='+', help='source file')
323 ap.add_argument('files', metavar='FILE', nargs='+', help='source file')
215 args = ap.parse_args()
324 args = ap.parse_args()
216 opts = {
325 opts = {
217 'dictiter': args.dictiter,
326 'dictiter': args.dictiter,
327 'treat-as-kwargs': set(args.treat_as_kwargs),
328 'allow-attr-methods': args.allow_attr_methods,
218 }
329 }
219 for fname in args.files:
330 for fname in args.files:
220 if args.inplace:
331 if args.inplace:
@@ -226,6 +337,7 b' def main():'
226 fout = sys.stdout.buffer
337 fout = sys.stdout.buffer
227 process(fin, fout, opts)
338 process(fin, fout, opts)
228
339
340
229 if __name__ == '__main__':
341 if __name__ == '__main__':
230 if sys.version_info.major < 3:
342 if sys.version_info.major < 3:
231 print('This script must be run under Python 3.')
343 print('This script must be run under Python 3.')
@@ -1,12 +1,12 b''
1 from __future__ import absolute_import
1 from __future__ import absolute_import
2 import __builtin__
2 import __builtin__
3 import os
3 import os
4 from mercurial import (
4 from mercurial import util
5 util,
5
6 )
7
6
8 def lowerwrap(scope, funcname):
7 def lowerwrap(scope, funcname):
9 f = getattr(scope, funcname)
8 f = getattr(scope, funcname)
9
10 def wrap(fname, *args, **kwargs):
10 def wrap(fname, *args, **kwargs):
11 d, base = os.path.split(fname)
11 d, base = os.path.split(fname)
12 try:
12 try:
@@ -19,11 +19,14 b' def lowerwrap(scope, funcname):'
19 if fn.lower() == base.lower():
19 if fn.lower() == base.lower():
20 return f(os.path.join(d, fn), *args, **kwargs)
20 return f(os.path.join(d, fn), *args, **kwargs)
21 return f(fname, *args, **kwargs)
21 return f(fname, *args, **kwargs)
22
22 scope.__dict__[funcname] = wrap
23 scope.__dict__[funcname] = wrap
23
24
25
24 def normcase(path):
26 def normcase(path):
25 return path.lower()
27 return path.lower()
26
28
29
27 os.path.normcase = normcase
30 os.path.normcase = normcase
28
31
29 for f in 'file open'.split():
32 for f in 'file open'.split():
@@ -53,15 +53,28 b' import timeit'
53 # Python version and OS
53 # Python version and OS
54 timer = timeit.default_timer
54 timer = timeit.default_timer
55
55
56
56 def main():
57 def main():
57 parser = argparse.ArgumentParser()
58 parser = argparse.ArgumentParser()
58 parser.add_argument('pipe', type=str, nargs=1,
59 parser.add_argument(
59 help='Path of named pipe to create and listen on.')
60 'pipe',
60 parser.add_argument('output', default='trace.json', type=str, nargs='?',
61 type=str,
61 help='Path of json file to create where the traces '
62 nargs=1,
62 'will be stored.')
63 help='Path of named pipe to create and listen on.',
63 parser.add_argument('--debug', default=False, action='store_true',
64 )
64 help='Print useful debug messages')
65 parser.add_argument(
66 'output',
67 default='trace.json',
68 type=str,
69 nargs='?',
70 help='Path of json file to create where the traces ' 'will be stored.',
71 )
72 parser.add_argument(
73 '--debug',
74 default=False,
75 action='store_true',
76 help='Print useful debug messages',
77 )
65 args = parser.parse_args()
78 args = parser.parse_args()
66 fn = args.pipe[0]
79 fn = args.pipe[0]
67 os.mkfifo(fn)
80 os.mkfifo(fn)
@@ -86,19 +99,23 b' def main():'
86 payload_args = {}
99 payload_args = {}
87 pid = _threadmap[session]
100 pid = _threadmap[session]
88 ts_micros = (now - start) * 1000000
101 ts_micros = (now - start) * 1000000
89 out.write(json.dumps(
102 out.write(
90 {
103 json.dumps(
91 "name": label,
104 {
92 "cat": "misc",
105 "name": label,
93 "ph": _TYPEMAP[verb],
106 "cat": "misc",
94 "ts": ts_micros,
107 "ph": _TYPEMAP[verb],
95 "pid": pid,
108 "ts": ts_micros,
96 "tid": 1,
109 "pid": pid,
97 "args": payload_args,
110 "tid": 1,
98 }))
111 "args": payload_args,
112 }
113 )
114 )
99 out.write(',\n')
115 out.write(',\n')
100 finally:
116 finally:
101 os.unlink(fn)
117 os.unlink(fn)
102
118
119
103 if __name__ == '__main__':
120 if __name__ == '__main__':
104 main()
121 main()
This diff has been collapsed as it changes many lines, (1001 lines changed) Show them Hide them
@@ -26,11 +26,15 b' import optparse'
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29
29 if sys.version_info[0] < 3:
30 if sys.version_info[0] < 3:
30 opentext = open
31 opentext = open
31 else:
32 else:
33
32 def opentext(f):
34 def opentext(f):
33 return open(f, encoding='latin1')
35 return open(f, encoding='latin1')
36
37
34 try:
38 try:
35 xrange
39 xrange
36 except NameError:
40 except NameError:
@@ -42,6 +46,7 b' except ImportError:'
42
46
43 import testparseutil
47 import testparseutil
44
48
49
45 def compilere(pat, multiline=False):
50 def compilere(pat, multiline=False):
46 if multiline:
51 if multiline:
47 pat = '(?m)' + pat
52 pat = '(?m)' + pat
@@ -52,10 +57,22 b' def compilere(pat, multiline=False):'
52 pass
57 pass
53 return re.compile(pat)
58 return re.compile(pat)
54
59
60
55 # check "rules depending on implementation of repquote()" in each
61 # check "rules depending on implementation of repquote()" in each
56 # patterns (especially pypats), before changing around repquote()
62 # patterns (especially pypats), before changing around repquote()
57 _repquotefixedmap = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q',
63 _repquotefixedmap = {
58 '%': '%', '\\': 'b', '*': 'A', '+': 'P', '-': 'M'}
64 ' ': ' ',
65 '\n': '\n',
66 '.': 'p',
67 ':': 'q',
68 '%': '%',
69 '\\': 'b',
70 '*': 'A',
71 '+': 'P',
72 '-': 'M',
73 }
74
75
59 def _repquoteencodechr(i):
76 def _repquoteencodechr(i):
60 if i > 255:
77 if i > 255:
61 return 'u'
78 return 'u'
@@ -67,13 +84,17 b' def _repquoteencodechr(i):'
67 if c.isdigit():
84 if c.isdigit():
68 return 'n'
85 return 'n'
69 return 'o'
86 return 'o'
87
88
70 _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256))
89 _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256))
71
90
91
72 def repquote(m):
92 def repquote(m):
73 t = m.group('text')
93 t = m.group('text')
74 t = t.translate(_repquotett)
94 t = t.translate(_repquotett)
75 return m.group('quote') + t + m.group('quote')
95 return m.group('quote') + t + m.group('quote')
76
96
97
77 def reppython(m):
98 def reppython(m):
78 comment = m.group('comment')
99 comment = m.group('comment')
79 if comment:
100 if comment:
@@ -81,87 +102,103 b' def reppython(m):'
81 return "#" * l + comment[l:]
102 return "#" * l + comment[l:]
82 return repquote(m)
103 return repquote(m)
83
104
105
84 def repcomment(m):
106 def repcomment(m):
85 return m.group(1) + "#" * len(m.group(2))
107 return m.group(1) + "#" * len(m.group(2))
86
108
109
87 def repccomment(m):
110 def repccomment(m):
88 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
111 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
89 return m.group(1) + t + "*/"
112 return m.group(1) + t + "*/"
90
113
114
91 def repcallspaces(m):
115 def repcallspaces(m):
92 t = re.sub(r"\n\s+", "\n", m.group(2))
116 t = re.sub(r"\n\s+", "\n", m.group(2))
93 return m.group(1) + t
117 return m.group(1) + t
94
118
119
95 def repinclude(m):
120 def repinclude(m):
96 return m.group(1) + "<foo>"
121 return m.group(1) + "<foo>"
97
122
123
98 def rephere(m):
124 def rephere(m):
99 t = re.sub(r"\S", "x", m.group(2))
125 t = re.sub(r"\S", "x", m.group(2))
100 return m.group(1) + t
126 return m.group(1) + t
101
127
102
128
103 testpats = [
129 testpats = [
104 [
130 [
105 (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"),
131 (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"),
106 (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
132 (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
107 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
133 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
108 (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"),
134 (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"),
109 (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
135 (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
110 (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
136 (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
111 (r'echo -n', "don't use 'echo -n', use printf"),
137 (r'echo -n', "don't use 'echo -n', use printf"),
112 (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
138 (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
113 (r'head -c', "don't use 'head -c', use 'dd'"),
139 (r'head -c', "don't use 'head -c', use 'dd'"),
114 (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
140 (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
115 (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
141 (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
116 (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"),
142 (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"),
117 (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
143 (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
118 (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
144 (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
119 (r'\$\(.*\)', "don't use $(expr), use `expr`"),
145 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
120 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
146 (
121 (r'\[[^\]]+==', '[ foo == bar ] is a bashism, use [ foo = bar ] instead'),
147 r'\[[^\]]+==',
122 (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
148 '[ foo == bar ] is a bashism, use [ foo = bar ] instead',
123 "use egrep for extended grep syntax"),
149 ),
124 (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
150 (
125 (r'(?<!!)/bin/', "don't use explicit paths for tools"),
151 r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
126 (r'#!.*/bash', "don't use bash in shebang, use sh"),
152 "use egrep for extended grep syntax",
127 (r'[^\n]\Z', "no trailing newline"),
153 ),
128 (r'export .*=', "don't export and assign at once"),
154 (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
129 (r'^source\b', "don't use 'source', use '.'"),
155 (r'(?<!!)/bin/', "don't use explicit paths for tools"),
130 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
156 (r'#!.*/bash', "don't use bash in shebang, use sh"),
131 (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
157 (r'[^\n]\Z', "no trailing newline"),
132 (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
158 (r'export .*=', "don't export and assign at once"),
133 (r'^stop\(\)', "don't use 'stop' as a shell function name"),
159 (r'^source\b', "don't use 'source', use '.'"),
134 (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
160 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
135 (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"),
161 (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
136 (r'^alias\b.*=', "don't use alias, use a function"),
162 (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
137 (r'if\s*!', "don't use '!' to negate exit status"),
163 (r'^stop\(\)', "don't use 'stop' as a shell function name"),
138 (r'/dev/u?random', "don't use entropy, use /dev/zero"),
164 (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
139 (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
165 (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"),
140 (r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
166 (r'^alias\b.*=', "don't use alias, use a function"),
141 "put a backslash-escaped newline after sed 'i' command"),
167 (r'if\s*!', "don't use '!' to negate exit status"),
142 (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
168 (r'/dev/u?random', "don't use entropy, use /dev/zero"),
143 (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
169 (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
144 (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
170 (
145 (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
171 r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
146 (r'\butil\.Abort\b', "directly use error.Abort"),
172 "put a backslash-escaped newline after sed 'i' command",
147 (r'\|&', "don't use |&, use 2>&1"),
173 ),
148 (r'\w = +\w', "only one space after = allowed"),
174 (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
149 (r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
175 (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
150 (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
176 (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
151 (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
177 (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
152 (r'grep.* -[ABC]', "don't use grep's context flags"),
178 (r'\butil\.Abort\b', "directly use error.Abort"),
153 (r'find.*-printf',
179 (r'\|&', "don't use |&, use 2>&1"),
154 "don't use 'find -printf', it doesn't exist on BSD find(1)"),
180 (r'\w = +\w', "only one space after = allowed"),
155 (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"),
181 (
156 ],
182 r'\bsed\b.*[^\\]\\n',
157 # warnings
183 "don't use 'sed ... \\n', use a \\ and a newline",
158 [
184 ),
159 (r'^function', "don't use 'function', use old style"),
185 (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
160 (r'^diff.*-\w*N', "don't use 'diff -N'"),
186 (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
161 (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"),
187 (r'grep.* -[ABC]', "don't use grep's context flags"),
162 (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
188 (
163 (r'kill (`|\$\()', "don't use kill, use killdaemons.py")
189 r'find.*-printf',
164 ]
190 "don't use 'find -printf', it doesn't exist on BSD find(1)",
191 ),
192 (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"),
193 ],
194 # warnings
195 [
196 (r'^function', "don't use 'function', use old style"),
197 (r'^diff.*-\w*N', "don't use 'diff -N'"),
198 (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"),
199 (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
200 (r'kill (`|\$\()', "don't use kill, use killdaemons.py"),
201 ],
165 ]
202 ]
166
203
167 testfilters = [
204 testfilters = [
@@ -171,45 +208,72 b' testfilters = ['
171
208
172 uprefix = r"^ \$ "
209 uprefix = r"^ \$ "
173 utestpats = [
210 utestpats = [
174 [
211 [
175 (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"),
212 (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"),
176 (uprefix + r'.*\|\s*sed[^|>\n]*\n',
213 (
177 "use regex test output patterns instead of sed"),
214 uprefix + r'.*\|\s*sed[^|>\n]*\n',
178 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
215 "use regex test output patterns instead of sed",
179 (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
216 ),
180 (uprefix + r'.*\|\| echo.*(fail|error)',
217 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
181 "explicit exit code checks unnecessary"),
218 (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
182 (uprefix + r'set -e', "don't use set -e"),
219 (
183 (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
220 uprefix + r'.*\|\| echo.*(fail|error)',
184 (uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
221 "explicit exit code checks unnecessary",
185 "as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
222 ),
186 '# no-msys'), # in test-pull.t which is skipped on windows
223 (uprefix + r'set -e', "don't use set -e"),
187 (r'^ [^$>].*27\.0\.0\.1',
224 (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
188 'use $LOCALIP not an explicit loopback address'),
225 (
189 (r'^ (?![>$] ).*\$LOCALIP.*[^)]$',
226 uprefix + r'.*:\.\S*/',
190 'mark $LOCALIP output lines with (glob) to help tests in BSD jails'),
227 "x:.y in a path does not work on msys, rewrite "
191 (r'^ (cat|find): .*: \$ENOENT\$',
228 "as x://.y, or see `hg log -k msys` for alternatives",
192 'use test -f to test for file existence'),
229 r'-\S+:\.|' '# no-msys', # -Rxxx
193 (r'^ diff -[^ -]*p',
230 ), # in test-pull.t which is skipped on windows
194 "don't use (external) diff with -p for portability"),
231 (
195 (r' readlink ', 'use readlink.py instead of readlink'),
232 r'^ [^$>].*27\.0\.0\.1',
196 (r'^ [-+][-+][-+] .* [-+]0000 \(glob\)',
233 'use $LOCALIP not an explicit loopback address',
197 "glob timezone field in diff output for portability"),
234 ),
198 (r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@',
235 (
199 "use '@@ -N* +N,n @@ (glob)' style chunk header for portability"),
236 r'^ (?![>$] ).*\$LOCALIP.*[^)]$',
200 (r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@',
237 'mark $LOCALIP output lines with (glob) to help tests in BSD jails',
201 "use '@@ -N,n +N* @@ (glob)' style chunk header for portability"),
238 ),
202 (r'^ @@ -[0-9]+ [+][0-9]+ @@',
239 (
203 "use '@@ -N* +N* @@ (glob)' style chunk header for portability"),
240 r'^ (cat|find): .*: \$ENOENT\$',
204 (uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff'
241 'use test -f to test for file existence',
205 r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$',
242 ),
206 "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)"),
243 (
207 ],
244 r'^ diff -[^ -]*p',
208 # warnings
245 "don't use (external) diff with -p for portability",
209 [
246 ),
210 (r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$',
247 (r' readlink ', 'use readlink.py instead of readlink'),
211 "glob match with no glob string (?, *, /, and $LOCALIP)"),
248 (
212 ]
249 r'^ [-+][-+][-+] .* [-+]0000 \(glob\)',
250 "glob timezone field in diff output for portability",
251 ),
252 (
253 r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@',
254 "use '@@ -N* +N,n @@ (glob)' style chunk header for portability",
255 ),
256 (
257 r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@',
258 "use '@@ -N,n +N* @@ (glob)' style chunk header for portability",
259 ),
260 (
261 r'^ @@ -[0-9]+ [+][0-9]+ @@',
262 "use '@@ -N* +N* @@ (glob)' style chunk header for portability",
263 ),
264 (
265 uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff'
266 r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$',
267 "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)",
268 ),
269 ],
270 # warnings
271 [
272 (
273 r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$',
274 "glob match with no glob string (?, *, /, and $LOCALIP)",
275 ),
276 ],
213 ]
277 ]
214
278
215 # transform plain test rules to unified test's
279 # transform plain test rules to unified test's
@@ -235,157 +299,214 b' utestfilters = ['
235
299
236 # common patterns to check *.py
300 # common patterns to check *.py
237 commonpypats = [
301 commonpypats = [
238 [
302 [
239 (r'\\$', 'Use () to wrap long lines in Python, not \\'),
303 (r'\\$', 'Use () to wrap long lines in Python, not \\'),
240 (r'^\s*def\s*\w+\s*\(.*,\s*\(',
304 (
241 "tuple parameter unpacking not available in Python 3+"),
305 r'^\s*def\s*\w+\s*\(.*,\s*\(',
242 (r'lambda\s*\(.*,.*\)',
306 "tuple parameter unpacking not available in Python 3+",
243 "tuple parameter unpacking not available in Python 3+"),
307 ),
244 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
308 (
245 (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"),
309 r'lambda\s*\(.*,.*\)',
246 (r'\bdict\(.*=', 'dict() is different in Py2 and 3 and is slower than {}',
310 "tuple parameter unpacking not available in Python 3+",
247 'dict-from-generator'),
311 ),
248 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
312 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
249 (r'\s<>\s', '<> operator is not available in Python 3+, use !='),
313 (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"),
250 (r'^\s*\t', "don't use tabs"),
314 (
251 (r'\S;\s*\n', "semicolon"),
315 r'\bdict\(.*=',
252 (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"),
316 'dict() is different in Py2 and 3 and is slower than {}',
253 (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"),
317 'dict-from-generator',
254 (r'(\w|\)),\w', "missing whitespace after ,"),
318 ),
255 (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
319 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
256 (r'^\s+(\w|\.)+=\w[^,()\n]*$', "missing whitespace in assignment"),
320 (r'\s<>\s', '<> operator is not available in Python 3+, use !='),
257 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
321 (r'^\s*\t', "don't use tabs"),
258 ((
322 (r'\S;\s*\n', "semicolon"),
259 # a line ending with a colon, potentially with trailing comments
323 (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"),
260 r':([ \t]*#[^\n]*)?\n'
324 (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"),
261 # one that is not a pass and not only a comment
325 (r'(\w|\)),\w', "missing whitespace after ,"),
262 r'(?P<indent>[ \t]+)[^#][^\n]+\n'
326 (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
263 # more lines at the same indent level
327 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
264 r'((?P=indent)[^\n]+\n)*'
328 (
265 # a pass at the same indent level, which is bogus
329 (
266 r'(?P=indent)pass[ \t\n#]'
330 # a line ending with a colon, potentially with trailing comments
267 ), 'omit superfluous pass'),
331 r':([ \t]*#[^\n]*)?\n'
268 (r'[^\n]\Z', "no trailing newline"),
332 # one that is not a pass and not only a comment
269 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
333 r'(?P<indent>[ \t]+)[^#][^\n]+\n'
270 # (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
334 # more lines at the same indent level
271 # "don't use underbars in identifiers"),
335 r'((?P=indent)[^\n]+\n)*'
272 (r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
336 # a pass at the same indent level, which is bogus
273 "don't use camelcase in identifiers", r'#.*camelcase-required'),
337 r'(?P=indent)pass[ \t\n#]'
274 (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
338 ),
275 "linebreak after :"),
339 'omit superfluous pass',
276 (r'class\s[^( \n]+:', "old-style class, use class foo(object)",
340 ),
277 r'#.*old-style'),
341 (r'[^\n]\Z', "no trailing newline"),
278 (r'class\s[^( \n]+\(\):',
342 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
279 "class foo() creates old style object, use class foo(object)",
343 (
280 r'#.*old-style'),
344 r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
281 (r'\b(%s)\(' % '|'.join(k for k in keyword.kwlist
345 "don't use camelcase in identifiers",
282 if k not in ('print', 'exec')),
346 r'#.*camelcase-required',
283 "Python keyword is not a function"),
347 ),
284 (r',]', "unneeded trailing ',' in list"),
348 (
285 # (r'class\s[A-Z][^\(]*\((?!Exception)',
349 r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
286 # "don't capitalize non-exception classes"),
350 "linebreak after :",
287 # (r'in range\(', "use xrange"),
351 ),
288 # (r'^\s*print\s+', "avoid using print in core and extensions"),
352 (
289 (r'[\x80-\xff]', "non-ASCII character literal"),
353 r'class\s[^( \n]+:',
290 (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"),
354 "old-style class, use class foo(object)",
291 (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
355 r'#.*old-style',
292 "gratuitous whitespace after Python keyword"),
356 ),
293 (r'([\(\[][ \t]\S)|(\S[ \t][\)\]])', "gratuitous whitespace in () or []"),
357 (
294 # (r'\s\s=', "gratuitous whitespace before ="),
358 r'class\s[^( \n]+\(\):',
295 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
359 "class foo() creates old style object, use class foo(object)",
296 "missing whitespace around operator"),
360 r'#.*old-style',
297 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
361 ),
298 "missing whitespace around operator"),
362 (
299 (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
363 r'\b(%s)\('
300 "missing whitespace around operator"),
364 % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')),
301 (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]',
365 "Python keyword is not a function",
302 "wrong whitespace around ="),
366 ),
303 (r'\([^()]*( =[^=]|[^<>!=]= )',
367 # (r'class\s[A-Z][^\(]*\((?!Exception)',
304 "no whitespace around = for named parameters"),
368 # "don't capitalize non-exception classes"),
305 (r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
369 # (r'in range\(', "use xrange"),
306 "don't use old-style two-argument raise, use Exception(message)"),
370 # (r'^\s*print\s+', "avoid using print in core and extensions"),
307 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
371 (r'[\x80-\xff]', "non-ASCII character literal"),
308 (r' [=!]=\s+(True|False|None)',
372 (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"),
309 "comparison with singleton, use 'is' or 'is not' instead"),
373 (
310 (r'^\s*(while|if) [01]:',
374 r'([\(\[][ \t]\S)|(\S[ \t][\)\]])',
311 "use True/False for constant Boolean expression"),
375 "gratuitous whitespace in () or []",
312 (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
376 ),
313 (r'(?:(?<!def)\s+|\()hasattr\(',
377 # (r'\s\s=', "gratuitous whitespace before ="),
314 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
378 (
315 'instead', r'#.*hasattr-py3-only'),
379 r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
316 (r'opener\([^)]*\).read\(',
380 "missing whitespace around operator",
317 "use opener.read() instead"),
381 ),
318 (r'opener\([^)]*\).write\(',
382 (
319 "use opener.write() instead"),
383 r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
320 (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
384 "missing whitespace around operator",
321 (r'\.debug\(\_', "don't mark debug messages for translation"),
385 ),
322 (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
386 (
323 (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
387 r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
324 (r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
388 "missing whitespace around operator",
325 'legacy exception syntax; use "as" instead of ","'),
389 ),
326 (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
390 (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="),
327 (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
391 (
328 (r'os\.path\.join\(.*, *(""|\'\')\)',
392 r'\([^()]*( =[^=]|[^<>!=]= )',
329 "use pathutil.normasprefix(path) instead of os.path.join(path, '')"),
393 "no whitespace around = for named parameters",
330 (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
394 ),
331 # XXX only catch mutable arguments on the first line of the definition
395 (
332 (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
396 r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
333 (r'\butil\.Abort\b', "directly use error.Abort"),
397 "don't use old-style two-argument raise, use Exception(message)",
334 (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"),
398 ),
335 (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + "
399 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
336 "pycompat.queue.Empty"),
400 (
337 (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
401 r' [=!]=\s+(True|False|None)',
338 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
402 "comparison with singleton, use 'is' or 'is not' instead",
339 (r'^import SocketServer', "don't use SockerServer, use util.socketserver"),
403 ),
340 (r'^import urlparse', "don't use urlparse, use util.urlreq"),
404 (
341 (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
405 r'^\s*(while|if) [01]:',
342 (r'^import cPickle', "don't use cPickle, use util.pickle"),
406 "use True/False for constant Boolean expression",
343 (r'^import pickle', "don't use pickle, use util.pickle"),
407 ),
344 (r'^import httplib', "don't use httplib, use util.httplib"),
408 (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
345 (r'^import BaseHTTPServer', "use util.httpserver instead"),
409 (
346 (r'^(from|import) mercurial\.(cext|pure|cffi)',
410 r'(?:(?<!def)\s+|\()hasattr\(',
347 "use mercurial.policy.importmod instead"),
411 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
348 (r'\.next\(\)', "don't use .next(), use next(...)"),
412 'instead',
349 (r'([a-z]*).revision\(\1\.node\(',
413 r'#.*hasattr-py3-only',
350 "don't convert rev to node before passing to revision(nodeorrev)"),
414 ),
351 (r'platform\.system\(\)', "don't use platform.system(), use pycompat"),
415 (r'opener\([^)]*\).read\(', "use opener.read() instead"),
352
416 (r'opener\([^)]*\).write\(', "use opener.write() instead"),
353 ],
417 (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
354 # warnings
418 (r'\.debug\(\_', "don't mark debug messages for translation"),
355 [
419 (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
356 ]
420 (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
421 (
422 r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
423 'legacy exception syntax; use "as" instead of ","',
424 ),
425 (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
426 (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
427 (
428 r'os\.path\.join\(.*, *(""|\'\')\)',
429 "use pathutil.normasprefix(path) instead of os.path.join(path, '')",
430 ),
431 (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
432 # XXX only catch mutable arguments on the first line of the definition
433 (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
434 (r'\butil\.Abort\b', "directly use error.Abort"),
435 (
436 r'^@(\w*\.)?cachefunc',
437 "module-level @cachefunc is risky, please avoid",
438 ),
439 (
440 r'^import Queue',
441 "don't use Queue, use pycompat.queue.Queue + "
442 "pycompat.queue.Empty",
443 ),
444 (
445 r'^import cStringIO',
446 "don't use cStringIO.StringIO, use util.stringio",
447 ),
448 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
449 (
450 r'^import SocketServer',
451 "don't use SockerServer, use util.socketserver",
452 ),
453 (r'^import urlparse', "don't use urlparse, use util.urlreq"),
454 (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
455 (r'^import cPickle', "don't use cPickle, use util.pickle"),
456 (r'^import pickle', "don't use pickle, use util.pickle"),
457 (r'^import httplib', "don't use httplib, use util.httplib"),
458 (r'^import BaseHTTPServer', "use util.httpserver instead"),
459 (
460 r'^(from|import) mercurial\.(cext|pure|cffi)',
461 "use mercurial.policy.importmod instead",
462 ),
463 (r'\.next\(\)', "don't use .next(), use next(...)"),
464 (
465 r'([a-z]*).revision\(\1\.node\(',
466 "don't convert rev to node before passing to revision(nodeorrev)",
467 ),
468 (r'platform\.system\(\)', "don't use platform.system(), use pycompat"),
469 ],
470 # warnings
471 [],
357 ]
472 ]
358
473
359 # patterns to check normal *.py files
474 # patterns to check normal *.py files
360 pypats = [
475 pypats = [
361 [
476 [
362 # Ideally, these should be placed in "commonpypats" for
477 # Ideally, these should be placed in "commonpypats" for
363 # consistency of coding rules in Mercurial source tree.
478 # consistency of coding rules in Mercurial source tree.
364 # But on the other hand, these are not so seriously required for
479 # But on the other hand, these are not so seriously required for
365 # python code fragments embedded in test scripts. Fixing test
480 # python code fragments embedded in test scripts. Fixing test
366 # scripts for these patterns requires many changes, and has less
481 # scripts for these patterns requires many changes, and has less
367 # profit than effort.
482 # profit than effort.
368 (r'.{81}', "line too long"),
483 (r'raise Exception', "don't raise generic exceptions"),
369 (r'raise Exception', "don't raise generic exceptions"),
484 (r'[\s\(](open|file)\([^)]*\)\.read\(', "use util.readfile() instead"),
370 (r'[\s\(](open|file)\([^)]*\)\.read\(',
485 (
371 "use util.readfile() instead"),
486 r'[\s\(](open|file)\([^)]*\)\.write\(',
372 (r'[\s\(](open|file)\([^)]*\)\.write\(',
487 "use util.writefile() instead",
373 "use util.writefile() instead"),
488 ),
374 (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))',
489 (
375 "always assign an opened file to a variable, and close it afterwards"),
490 r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))',
376 (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))',
491 "always assign an opened file to a variable, and close it afterwards",
377 "always assign an opened file to a variable, and close it afterwards"),
492 ),
378 (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"),
493 (
379 (r'^import atexit', "don't use atexit, use ui.atexit"),
494 r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))',
380
495 "always assign an opened file to a variable, and close it afterwards",
381 # rules depending on implementation of repquote()
496 ),
382 (r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
497 (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"),
383 'string join across lines with no space'),
498 (r'^import atexit', "don't use atexit, use ui.atexit"),
384 (r'''(?x)ui\.(status|progress|write|note|warn)\(
499 # rules depending on implementation of repquote()
500 (
501 r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
502 'string join across lines with no space',
503 ),
504 (
505 r'''(?x)ui\.(status|progress|write|note|warn)\(
385 [ \t\n#]*
506 [ \t\n#]*
386 (?# any strings/comments might precede a string, which
507 (?# any strings/comments might precede a string, which
387 # contains translatable message)
508 # contains translatable message)
388 ((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)*
509 b?((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)*
389 (?# sequence consisting of below might precede translatable message
510 (?# sequence consisting of below might precede translatable message
390 # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ...
511 # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ...
391 # - escaped character: "\\", "\n", "\0" ...
512 # - escaped character: "\\", "\n", "\0" ...
@@ -395,51 +516,55 b' pypats = ['
395 (?# this regexp can't use [^...] style,
516 (?# this regexp can't use [^...] style,
396 # because _preparepats forcibly adds "\n" into [^...],
517 # because _preparepats forcibly adds "\n" into [^...],
397 # even though this regexp wants match it against "\n")''',
518 # even though this regexp wants match it against "\n")''',
398 "missing _() in ui message (use () to hide false-positives)"),
519 "missing _() in ui message (use () to hide false-positives)",
399 ] + commonpypats[0],
520 ),
400 # warnings
521 ]
401 [
522 + commonpypats[0],
402 # rules depending on implementation of repquote()
523 # warnings
403 (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
524 [
404 ] + commonpypats[1]
525 # rules depending on implementation of repquote()
526 (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
527 ]
528 + commonpypats[1],
405 ]
529 ]
406
530
407 # patterns to check *.py for embedded ones in test script
531 # patterns to check *.py for embedded ones in test script
408 embeddedpypats = [
532 embeddedpypats = [
409 [
533 [] + commonpypats[0],
410 ] + commonpypats[0],
534 # warnings
411 # warnings
535 [] + commonpypats[1],
412 [
413 ] + commonpypats[1]
414 ]
536 ]
415
537
416 # common filters to convert *.py
538 # common filters to convert *.py
417 commonpyfilters = [
539 commonpyfilters = [
418 (r"""(?msx)(?P<comment>\#.*?$)|
540 (
541 r"""(?msx)(?P<comment>\#.*?$)|
419 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
542 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
420 (?P<text>(([^\\]|\\.)*?))
543 (?P<text>(([^\\]|\\.)*?))
421 (?P=quote))""", reppython),
544 (?P=quote))""",
545 reppython,
546 ),
422 ]
547 ]
423
548
424 # filters to convert normal *.py files
549 # filters to convert normal *.py files
425 pyfilters = [
550 pyfilters = [] + commonpyfilters
426 ] + commonpyfilters
427
551
428 # non-filter patterns
552 # non-filter patterns
429 pynfpats = [
553 pynfpats = [
430 [
554 [
431 (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"),
555 (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"),
432 (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"),
556 (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"),
433 (r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]',
557 (
434 "use pycompat.isdarwin"),
558 r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]',
559 "use pycompat.isdarwin",
560 ),
435 ],
561 ],
436 # warnings
562 # warnings
437 [],
563 [],
438 ]
564 ]
439
565
440 # filters to convert *.py for embedded ones in test script
566 # filters to convert *.py for embedded ones in test script
441 embeddedpyfilters = [
567 embeddedpyfilters = [] + commonpyfilters
442 ] + commonpyfilters
443
568
444 # extension non-filter patterns
569 # extension non-filter patterns
445 pyextnfpats = [
570 pyextnfpats = [
@@ -451,42 +576,40 b' pyextnfpats = ['
451 txtfilters = []
576 txtfilters = []
452
577
453 txtpats = [
578 txtpats = [
454 [
579 [
455 (r'\s$', 'trailing whitespace'),
580 (r'\s$', 'trailing whitespace'),
456 ('.. note::[ \n][^\n]', 'add two newlines after note::')
581 ('.. note::[ \n][^\n]', 'add two newlines after note::'),
457 ],
582 ],
458 []
583 [],
459 ]
584 ]
460
585
461 cpats = [
586 cpats = [
462 [
587 [
463 (r'//', "don't use //-style comments"),
588 (r'//', "don't use //-style comments"),
464 (r'\S\t', "don't use tabs except for indent"),
589 (r'\S\t', "don't use tabs except for indent"),
465 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
590 (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
466 (r'.{81}', "line too long"),
591 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
467 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
592 (r'return\(', "return is not a function"),
468 (r'return\(', "return is not a function"),
593 (r' ;', "no space before ;"),
469 (r' ;', "no space before ;"),
594 (r'[^;] \)', "no space before )"),
470 (r'[^;] \)', "no space before )"),
595 (r'[)][{]', "space between ) and {"),
471 (r'[)][{]', "space between ) and {"),
596 (r'\w+\* \w+', "use int *foo, not int* foo"),
472 (r'\w+\* \w+', "use int *foo, not int* foo"),
597 (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
473 (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
598 (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
474 (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
599 (r'\w,\w', "missing whitespace after ,"),
475 (r'\w,\w', "missing whitespace after ,"),
600 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
476 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
601 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
477 (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
602 (r'^#\s+\w', "use #foo, not # foo"),
478 (r'^#\s+\w', "use #foo, not # foo"),
603 (r'[^\n]\Z', "no trailing newline"),
479 (r'[^\n]\Z', "no trailing newline"),
604 (r'^\s*#import\b', "use only #include in standard C code"),
480 (r'^\s*#import\b', "use only #include in standard C code"),
605 (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
481 (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
606 (r'strcat\(', "don't use strcat"),
482 (r'strcat\(', "don't use strcat"),
607 # rules depending on implementation of repquote()
483
608 ],
484 # rules depending on implementation of repquote()
609 # warnings
485 ],
610 [
486 # warnings
611 # rules depending on implementation of repquote()
487 [
612 ],
488 # rules depending on implementation of repquote()
489 ]
490 ]
613 ]
491
614
492 cfilters = [
615 cfilters = [
@@ -497,82 +620,109 b' cfilters = ['
497 ]
620 ]
498
621
499 inutilpats = [
622 inutilpats = [
500 [
623 [(r'\bui\.', "don't use ui in util"),],
501 (r'\bui\.', "don't use ui in util"),
624 # warnings
502 ],
625 [],
503 # warnings
504 []
505 ]
626 ]
506
627
507 inrevlogpats = [
628 inrevlogpats = [
508 [
629 [(r'\brepo\.', "don't use repo in revlog"),],
509 (r'\brepo\.', "don't use repo in revlog"),
630 # warnings
510 ],
631 [],
511 # warnings
512 []
513 ]
632 ]
514
633
515 webtemplatefilters = []
634 webtemplatefilters = []
516
635
517 webtemplatepats = [
636 webtemplatepats = [
518 [],
637 [],
519 [
638 [
520 (r'{desc(\|(?!websub|firstline)[^\|]*)+}',
639 (
521 'follow desc keyword with either firstline or websub'),
640 r'{desc(\|(?!websub|firstline)[^\|]*)+}',
522 ]
641 'follow desc keyword with either firstline or websub',
642 ),
643 ],
523 ]
644 ]
524
645
525 allfilesfilters = []
646 allfilesfilters = []
526
647
527 allfilespats = [
648 allfilespats = [
528 [
649 [
529 (r'(http|https)://[a-zA-Z0-9./]*selenic.com/',
650 (
530 'use mercurial-scm.org domain URL'),
651 r'(http|https)://[a-zA-Z0-9./]*selenic.com/',
531 (r'mercurial@selenic\.com',
652 'use mercurial-scm.org domain URL',
532 'use mercurial-scm.org domain for mercurial ML address'),
653 ),
533 (r'mercurial-devel@selenic\.com',
654 (
534 'use mercurial-scm.org domain for mercurial-devel ML address'),
655 r'mercurial@selenic\.com',
535 ],
656 'use mercurial-scm.org domain for mercurial ML address',
536 # warnings
657 ),
537 [],
658 (
659 r'mercurial-devel@selenic\.com',
660 'use mercurial-scm.org domain for mercurial-devel ML address',
661 ),
662 ],
663 # warnings
664 [],
538 ]
665 ]
539
666
540 py3pats = [
667 py3pats = [
541 [
668 [
542 (r'os\.environ', "use encoding.environ instead (py3)", r'#.*re-exports'),
669 (
543 (r'os\.name', "use pycompat.osname instead (py3)"),
670 r'os\.environ',
544 (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'),
671 "use encoding.environ instead (py3)",
545 (r'os\.sep', "use pycompat.ossep instead (py3)"),
672 r'#.*re-exports',
546 (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"),
673 ),
547 (r'os\.altsep', "use pycompat.osaltsep instead (py3)"),
674 (r'os\.name', "use pycompat.osname instead (py3)"),
548 (r'sys\.platform', "use pycompat.sysplatform instead (py3)"),
675 (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'),
549 (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"),
676 (r'os\.sep', "use pycompat.ossep instead (py3)"),
550 (r'os\.getenv', "use encoding.environ.get instead"),
677 (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"),
551 (r'os\.setenv', "modifying the environ dict is not preferred"),
678 (r'os\.altsep', "use pycompat.osaltsep instead (py3)"),
552 (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"),
679 (r'sys\.platform', "use pycompat.sysplatform instead (py3)"),
553 ],
680 (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"),
554 # warnings
681 (r'os\.getenv', "use encoding.environ.get instead"),
555 [],
682 (r'os\.setenv', "modifying the environ dict is not preferred"),
683 (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"),
684 ],
685 # warnings
686 [],
556 ]
687 ]
557
688
558 checks = [
689 checks = [
559 ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
690 ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
560 ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats),
691 ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats),
561 ('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
692 ('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
562 ('python 3', r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
693 (
563 '', pyfilters, py3pats),
694 'python 3',
695 r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
696 '',
697 pyfilters,
698 py3pats,
699 ),
564 ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
700 ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
565 ('c', r'.*\.[ch]$', '', cfilters, cpats),
701 ('c', r'.*\.[ch]$', '', cfilters, cpats),
566 ('unified test', r'.*\.t$', '', utestfilters, utestpats),
702 ('unified test', r'.*\.t$', '', utestfilters, utestpats),
567 ('layering violation repo in revlog', r'mercurial/revlog\.py', '',
703 (
568 pyfilters, inrevlogpats),
704 'layering violation repo in revlog',
569 ('layering violation ui in util', r'mercurial/util\.py', '', pyfilters,
705 r'mercurial/revlog\.py',
570 inutilpats),
706 '',
707 pyfilters,
708 inrevlogpats,
709 ),
710 (
711 'layering violation ui in util',
712 r'mercurial/util\.py',
713 '',
714 pyfilters,
715 inutilpats,
716 ),
571 ('txt', r'.*\.txt$', '', txtfilters, txtpats),
717 ('txt', r'.*\.txt$', '', txtfilters, txtpats),
572 ('web template', r'mercurial/templates/.*\.tmpl', '',
718 (
573 webtemplatefilters, webtemplatepats),
719 'web template',
574 ('all except for .po', r'.*(?<!\.po)$', '',
720 r'mercurial/templates/.*\.tmpl',
575 allfilesfilters, allfilespats),
721 '',
722 webtemplatefilters,
723 webtemplatepats,
724 ),
725 ('all except for .po', r'.*(?<!\.po)$', '', allfilesfilters, allfilespats),
576 ]
726 ]
577
727
578 # (desc,
728 # (desc,
@@ -580,10 +730,15 b' checks = ['
580 # list of patterns to convert target files
730 # list of patterns to convert target files
581 # list of patterns to detect errors/warnings)
731 # list of patterns to detect errors/warnings)
582 embeddedchecks = [
732 embeddedchecks = [
583 ('embedded python',
733 (
584 testparseutil.pyembedded, embeddedpyfilters, embeddedpypats)
734 'embedded python',
735 testparseutil.pyembedded,
736 embeddedpyfilters,
737 embeddedpypats,
738 )
585 ]
739 ]
586
740
741
587 def _preparepats():
742 def _preparepats():
588 def preparefailandwarn(failandwarn):
743 def preparefailandwarn(failandwarn):
589 for pats in failandwarn:
744 for pats in failandwarn:
@@ -612,6 +767,7 b' def _preparepats():'
612 filters = c[-2]
767 filters = c[-2]
613 preparefilters(filters)
768 preparefilters(filters)
614
769
770
615 class norepeatlogger(object):
771 class norepeatlogger(object):
616 def __init__(self):
772 def __init__(self):
617 self._lastseen = None
773 self._lastseen = None
@@ -637,8 +793,10 b' class norepeatlogger(object):'
637 self._lastseen = msgid
793 self._lastseen = msgid
638 print(" " + msg)
794 print(" " + msg)
639
795
796
640 _defaultlogger = norepeatlogger()
797 _defaultlogger = norepeatlogger()
641
798
799
642 def getblame(f):
800 def getblame(f):
643 lines = []
801 lines = []
644 for l in os.popen('hg annotate -un %s' % f):
802 for l in os.popen('hg annotate -un %s' % f):
@@ -647,8 +805,16 b' def getblame(f):'
647 lines.append((line[1:-1], user, rev))
805 lines.append((line[1:-1], user, rev))
648 return lines
806 return lines
649
807
650 def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
808
651 blame=False, debug=False, lineno=True):
809 def checkfile(
810 f,
811 logfunc=_defaultlogger.log,
812 maxerr=None,
813 warnings=False,
814 blame=False,
815 debug=False,
816 lineno=True,
817 ):
652 """checks style and portability of a given file
818 """checks style and portability of a given file
653
819
654 :f: filepath
820 :f: filepath
@@ -680,8 +846,9 b' def checkfile(f, logfunc=_defaultlogger.'
680 print(name, f)
846 print(name, f)
681 if not (re.match(match, f) or (magic and re.search(magic, pre))):
847 if not (re.match(match, f) or (magic and re.search(magic, pre))):
682 if debug:
848 if debug:
683 print("Skipping %s for %s it doesn't match %s" % (
849 print(
684 name, match, f))
850 "Skipping %s for %s it doesn't match %s" % (name, match, f)
851 )
685 continue
852 continue
686 if "no-" "check-code" in pre:
853 if "no-" "check-code" in pre:
687 # If you're looking at this line, it's because a file has:
854 # If you're looking at this line, it's because a file has:
@@ -691,16 +858,28 b' def checkfile(f, logfunc=_defaultlogger.'
691 # spelling, we write it with the expected spelling from
858 # spelling, we write it with the expected spelling from
692 # tests/test-check-code.t
859 # tests/test-check-code.t
693 print("Skipping %s it has no-che?k-code (glob)" % f)
860 print("Skipping %s it has no-che?k-code (glob)" % f)
694 return "Skip" # skip checking this file
861 return "Skip" # skip checking this file
695
862
696 fc = _checkfiledata(name, f, pre, filters, pats, context,
863 fc = _checkfiledata(
697 logfunc, maxerr, warnings, blame, debug, lineno)
864 name,
865 f,
866 pre,
867 filters,
868 pats,
869 context,
870 logfunc,
871 maxerr,
872 warnings,
873 blame,
874 debug,
875 lineno,
876 )
698 if fc:
877 if fc:
699 result = False
878 result = False
700
879
701 if f.endswith('.t') and "no-" "check-code" not in pre:
880 if f.endswith('.t') and "no-" "check-code" not in pre:
702 if debug:
881 if debug:
703 print("Checking embedded code in %s" % (f))
882 print("Checking embedded code in %s" % f)
704
883
705 prelines = pre.splitlines()
884 prelines = pre.splitlines()
706 embeddederros = []
885 embeddederros = []
@@ -712,9 +891,21 b' def checkfile(f, logfunc=_defaultlogger.'
712
891
713 for found in embedded(f, prelines, embeddederros):
892 for found in embedded(f, prelines, embeddederros):
714 filename, starts, ends, code = found
893 filename, starts, ends, code = found
715 fc = _checkfiledata(name, f, code, filters, pats, context,
894 fc = _checkfiledata(
716 logfunc, curmaxerr, warnings, blame, debug,
895 name,
717 lineno, offset=starts - 1)
896 f,
897 code,
898 filters,
899 pats,
900 context,
901 logfunc,
902 curmaxerr,
903 warnings,
904 blame,
905 debug,
906 lineno,
907 offset=starts - 1,
908 )
718 if fc:
909 if fc:
719 result = False
910 result = False
720 if curmaxerr:
911 if curmaxerr:
@@ -724,9 +915,22 b' def checkfile(f, logfunc=_defaultlogger.'
724
915
725 return result
916 return result
726
917
727 def _checkfiledata(name, f, filedata, filters, pats, context,
918
728 logfunc, maxerr, warnings, blame, debug, lineno,
919 def _checkfiledata(
729 offset=None):
920 name,
921 f,
922 filedata,
923 filters,
924 pats,
925 context,
926 logfunc,
927 maxerr,
928 warnings,
929 blame,
930 debug,
931 lineno,
932 offset=None,
933 ):
730 """Execute actual error check for file data
934 """Execute actual error check for file data
731
935
732 :name: of the checking category
936 :name: of the checking category
@@ -759,10 +963,10 b' def _checkfiledata(name, f, filedata, fi'
759 fc = 0
963 fc = 0
760 pre = post = filedata
964 pre = post = filedata
761
965
762 if True: # TODO: get rid of this redundant 'if' block
966 if True: # TODO: get rid of this redundant 'if' block
763 for p, r in filters:
967 for p, r in filters:
764 post = re.sub(p, r, post)
968 post = re.sub(p, r, post)
765 nerrs = len(pats[0]) # nerr elements are errors
969 nerrs = len(pats[0]) # nerr elements are errors
766 if warnings:
970 if warnings:
767 pats = pats[0] + pats[1]
971 pats = pats[0] + pats[1]
768 else:
972 else:
@@ -801,8 +1005,10 b' def _checkfiledata(name, f, filedata, fi'
801
1005
802 if ignore and re.search(ignore, l, re.MULTILINE):
1006 if ignore and re.search(ignore, l, re.MULTILINE):
803 if debug:
1007 if debug:
804 print("Skipping %s for %s:%s (ignore pattern)" % (
1008 print(
805 name, f, (n + lineoffset)))
1009 "Skipping %s for %s:%s (ignore pattern)"
1010 % (name, f, (n + lineoffset))
1011 )
806 continue
1012 continue
807 bd = ""
1013 bd = ""
808 if blame:
1014 if blame:
@@ -837,21 +1043,38 b' def _checkfiledata(name, f, filedata, fi'
837
1043
838 return fc
1044 return fc
839
1045
1046
840 def main():
1047 def main():
841 parser = optparse.OptionParser("%prog [options] [files | -]")
1048 parser = optparse.OptionParser("%prog [options] [files | -]")
842 parser.add_option("-w", "--warnings", action="store_true",
1049 parser.add_option(
843 help="include warning-level checks")
1050 "-w",
844 parser.add_option("-p", "--per-file", type="int",
1051 "--warnings",
845 help="max warnings per file")
1052 action="store_true",
846 parser.add_option("-b", "--blame", action="store_true",
1053 help="include warning-level checks",
847 help="use annotate to generate blame info")
1054 )
848 parser.add_option("", "--debug", action="store_true",
1055 parser.add_option(
849 help="show debug information")
1056 "-p", "--per-file", type="int", help="max warnings per file"
850 parser.add_option("", "--nolineno", action="store_false",
1057 )
851 dest='lineno', help="don't show line numbers")
1058 parser.add_option(
1059 "-b",
1060 "--blame",
1061 action="store_true",
1062 help="use annotate to generate blame info",
1063 )
1064 parser.add_option(
1065 "", "--debug", action="store_true", help="show debug information"
1066 )
1067 parser.add_option(
1068 "",
1069 "--nolineno",
1070 action="store_false",
1071 dest='lineno',
1072 help="don't show line numbers",
1073 )
852
1074
853 parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False,
1075 parser.set_defaults(
854 lineno=True)
1076 per_file=15, warnings=False, blame=False, debug=False, lineno=True
1077 )
855 (options, args) = parser.parse_args()
1078 (options, args) = parser.parse_args()
856
1079
857 if len(args) == 0:
1080 if len(args) == 0:
@@ -866,11 +1089,17 b' def main():'
866
1089
867 ret = 0
1090 ret = 0
868 for f in check:
1091 for f in check:
869 if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
1092 if not checkfile(
870 blame=options.blame, debug=options.debug,
1093 f,
871 lineno=options.lineno):
1094 maxerr=options.per_file,
1095 warnings=options.warnings,
1096 blame=options.blame,
1097 debug=options.debug,
1098 lineno=options.lineno,
1099 ):
872 ret = 1
1100 ret = 1
873 return ret
1101 return ret
874
1102
1103
875 if __name__ == "__main__":
1104 if __name__ == "__main__":
876 sys.exit(main())
1105 sys.exit(main())
@@ -39,12 +39,6 b' errors = ['
39 "summary keyword should be most user-relevant one-word command or topic"),
39 "summary keyword should be most user-relevant one-word command or topic"),
40 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
40 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
41 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
41 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
42 # Forbid "_" in function name.
43 #
44 # We skip the check for cffi related functions. They use names mapping the
45 # name of the C function. C function names may contain "_".
46 (r"\n\+[ \t]+def (?!cffi)[a-z]+_[a-z]",
47 "adds a function with foo_bar naming"),
48 ]
42 ]
49
43
50 word = re.compile(r'\S')
44 word = re.compile(r'\S')
@@ -15,7 +15,8 b' foundopts = {}'
15 documented = {}
15 documented = {}
16 allowinconsistent = set()
16 allowinconsistent = set()
17
17
18 configre = re.compile(br'''
18 configre = re.compile(
19 br'''
19 # Function call
20 # Function call
20 ui\.config(?P<ctype>|int|bool|list)\(
21 ui\.config(?P<ctype>|int|bool|list)\(
21 # First argument.
22 # First argument.
@@ -23,9 +24,12 b" configre = re.compile(br'''"
23 # Second argument
24 # Second argument
24 ['"](?P<option>\S+)['"](,\s+
25 ['"](?P<option>\S+)['"](,\s+
25 (?:default=)?(?P<default>\S+?))?
26 (?:default=)?(?P<default>\S+?))?
26 \)''', re.VERBOSE | re.MULTILINE)
27 \)''',
28 re.VERBOSE | re.MULTILINE,
29 )
27
30
28 configwithre = re.compile(br'''
31 configwithre = re.compile(
32 br'''
29 ui\.config(?P<ctype>with)\(
33 ui\.config(?P<ctype>with)\(
30 # First argument is callback function. This doesn't parse robustly
34 # First argument is callback function. This doesn't parse robustly
31 # if it is e.g. a function call.
35 # if it is e.g. a function call.
@@ -33,23 +37,32 b" configwithre = re.compile(br'''"
33 ['"](?P<section>\S+)['"],\s*
37 ['"](?P<section>\S+)['"],\s*
34 ['"](?P<option>\S+)['"](,\s+
38 ['"](?P<option>\S+)['"](,\s+
35 (?:default=)?(?P<default>\S+?))?
39 (?:default=)?(?P<default>\S+?))?
36 \)''', re.VERBOSE | re.MULTILINE)
40 \)''',
41 re.VERBOSE | re.MULTILINE,
42 )
37
43
38 configpartialre = (br"""ui\.config""")
44 configpartialre = br"""ui\.config"""
39
45
40 ignorere = re.compile(br'''
46 ignorere = re.compile(
47 br'''
41 \#\s(?P<reason>internal|experimental|deprecated|developer|inconsistent)\s
48 \#\s(?P<reason>internal|experimental|deprecated|developer|inconsistent)\s
42 config:\s(?P<config>\S+\.\S+)$
49 config:\s(?P<config>\S+\.\S+)$
43 ''', re.VERBOSE | re.MULTILINE)
50 ''',
51 re.VERBOSE | re.MULTILINE,
52 )
44
53
45 if sys.version_info[0] > 2:
54 if sys.version_info[0] > 2:
55
46 def mkstr(b):
56 def mkstr(b):
47 if isinstance(b, str):
57 if isinstance(b, str):
48 return b
58 return b
49 return b.decode('utf8')
59 return b.decode('utf8')
60
61
50 else:
62 else:
51 mkstr = lambda x: x
63 mkstr = lambda x: x
52
64
65
53 def main(args):
66 def main(args):
54 for f in args:
67 for f in args:
55 sect = b''
68 sect = b''
@@ -115,18 +128,32 b' def main(args):'
115 name = m.group('section') + b"." + m.group('option')
128 name = m.group('section') + b"." + m.group('option')
116 default = m.group('default')
129 default = m.group('default')
117 if default in (
130 if default in (
118 None, b'False', b'None', b'0', b'[]', b'""', b"''"):
131 None,
132 b'False',
133 b'None',
134 b'0',
135 b'[]',
136 b'""',
137 b"''",
138 ):
119 default = b''
139 default = b''
120 if re.match(b'[a-z.]+$', default):
140 if re.match(b'[a-z.]+$', default):
121 default = b'<variable>'
141 default = b'<variable>'
122 if (name in foundopts and (ctype, default) != foundopts[name]
142 if (
123 and name not in allowinconsistent):
143 name in foundopts
144 and (ctype, default) != foundopts[name]
145 and name not in allowinconsistent
146 ):
124 print(mkstr(l.rstrip()))
147 print(mkstr(l.rstrip()))
125 fctype, fdefault = foundopts[name]
148 fctype, fdefault = foundopts[name]
126 print("conflict on %s: %r != %r" % (
149 print(
127 mkstr(name),
150 "conflict on %s: %r != %r"
128 (mkstr(ctype), mkstr(default)),
151 % (
129 (mkstr(fctype), mkstr(fdefault))))
152 mkstr(name),
153 (mkstr(ctype), mkstr(default)),
154 (mkstr(fctype), mkstr(fdefault)),
155 )
156 )
130 print("at %s:%d:" % (mkstr(f), linenum))
157 print("at %s:%d:" % (mkstr(f), linenum))
131 foundopts[name] = (ctype, default)
158 foundopts[name] = (ctype, default)
132 carryover = b''
159 carryover = b''
@@ -139,9 +166,11 b' def main(args):'
139
166
140 for name in sorted(foundopts):
167 for name in sorted(foundopts):
141 if name not in documented:
168 if name not in documented:
142 if not (name.startswith(b"devel.") or
169 if not (
143 name.startswith(b"experimental.") or
170 name.startswith(b"devel.")
144 name.startswith(b"debug.")):
171 or name.startswith(b"experimental.")
172 or name.startswith(b"debug.")
173 ):
145 ctype, default = foundopts[name]
174 ctype, default = foundopts[name]
146 if default:
175 if default:
147 if isinstance(default, bytes):
176 if isinstance(default, bytes):
@@ -149,8 +178,11 b' def main(args):'
149 default = ' [%s]' % default
178 default = ' [%s]' % default
150 elif isinstance(default, bytes):
179 elif isinstance(default, bytes):
151 default = mkstr(default)
180 default = mkstr(default)
152 print("undocumented: %s (%s)%s" % (
181 print(
153 mkstr(name), mkstr(ctype), default))
182 "undocumented: %s (%s)%s"
183 % (mkstr(name), mkstr(ctype), default)
184 )
185
154
186
155 if __name__ == "__main__":
187 if __name__ == "__main__":
156 if len(sys.argv) > 1:
188 if len(sys.argv) > 1:
@@ -16,6 +16,7 b' import sys'
16 import traceback
16 import traceback
17 import warnings
17 import warnings
18
18
19
19 def check_compat_py2(f):
20 def check_compat_py2(f):
20 """Check Python 3 compatibility for a file with Python 2"""
21 """Check Python 3 compatibility for a file with Python 2"""
21 with open(f, 'rb') as fh:
22 with open(f, 'rb') as fh:
@@ -40,6 +41,7 b' def check_compat_py2(f):'
40 if haveprint and 'print_function' not in futures:
41 if haveprint and 'print_function' not in futures:
41 print('%s requires print_function' % f)
42 print('%s requires print_function' % f)
42
43
44
43 def check_compat_py3(f):
45 def check_compat_py3(f):
44 """Check Python 3 compatibility of a file with Python 3."""
46 """Check Python 3 compatibility of a file with Python 3."""
45 with open(f, 'rb') as fh:
47 with open(f, 'rb') as fh:
@@ -54,8 +56,9 b' def check_compat_py3(f):'
54 # Try to import the module.
56 # Try to import the module.
55 # For now we only support modules in packages because figuring out module
57 # For now we only support modules in packages because figuring out module
56 # paths for things not in a package can be confusing.
58 # paths for things not in a package can be confusing.
57 if (f.startswith(('hgdemandimport/', 'hgext/', 'mercurial/'))
59 if f.startswith(
58 and not f.endswith('__init__.py')):
60 ('hgdemandimport/', 'hgext/', 'mercurial/')
61 ) and not f.endswith('__init__.py'):
59 assert f.endswith('.py')
62 assert f.endswith('.py')
60 name = f.replace('/', '.')[:-3]
63 name = f.replace('/', '.')[:-3]
61 try:
64 try:
@@ -79,11 +82,16 b' def check_compat_py3(f):'
79
82
80 if frame.filename:
83 if frame.filename:
81 filename = os.path.basename(frame.filename)
84 filename = os.path.basename(frame.filename)
82 print('%s: error importing: <%s> %s (error at %s:%d)' % (
85 print(
83 f, type(e).__name__, e, filename, frame.lineno))
86 '%s: error importing: <%s> %s (error at %s:%d)'
87 % (f, type(e).__name__, e, filename, frame.lineno)
88 )
84 else:
89 else:
85 print('%s: error importing module: <%s> %s (line %d)' % (
90 print(
86 f, type(e).__name__, e, frame.lineno))
91 '%s: error importing module: <%s> %s (line %d)'
92 % (f, type(e).__name__, e, frame.lineno)
93 )
94
87
95
88 if __name__ == '__main__':
96 if __name__ == '__main__':
89 if sys.version_info[0] == 2:
97 if sys.version_info[0] == 2:
@@ -96,7 +104,10 b" if __name__ == '__main__':"
96 fn(f)
104 fn(f)
97
105
98 for w in warns:
106 for w in warns:
99 print(warnings.formatwarning(w.message, w.category,
107 print(
100 w.filename, w.lineno).rstrip())
108 warnings.formatwarning(
109 w.message, w.category, w.filename, w.lineno
110 ).rstrip()
111 )
101
112
102 sys.exit(0)
113 sys.exit(0)
@@ -1,6 +1,5 b''
1 # Files that just need to be migrated to the formatter.
1 # Files that just need to be migrated to the formatter.
2 # Do not add new files here!
2 # Do not add new files here!
3 mercurial/cext/dirs.c
4 mercurial/cext/manifest.c
3 mercurial/cext/manifest.c
5 mercurial/cext/osutil.c
4 mercurial/cext/osutil.c
6 # Vendored code that we should never format:
5 # Vendored code that we should never format:
@@ -49,6 +48,10 b' contrib/python-zstandard/zstd/compress/h'
49 contrib/python-zstandard/zstd/compress/huf_compress.c
48 contrib/python-zstandard/zstd/compress/huf_compress.c
50 contrib/python-zstandard/zstd/compress/zstd_compress.c
49 contrib/python-zstandard/zstd/compress/zstd_compress.c
51 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
50 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
51 contrib/python-zstandard/zstd/compress/zstd_compress_literals.c
52 contrib/python-zstandard/zstd/compress/zstd_compress_literals.h
53 contrib/python-zstandard/zstd/compress/zstd_compress_sequences.c
54 contrib/python-zstandard/zstd/compress/zstd_compress_sequences.h
52 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
55 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
53 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
56 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
54 contrib/python-zstandard/zstd/compress/zstd_fast.c
57 contrib/python-zstandard/zstd/compress/zstd_fast.c
@@ -23,6 +23,7 b" if sys.argv[1] == '-':"
23 else:
23 else:
24 log = open(sys.argv[1], 'a')
24 log = open(sys.argv[1], 'a')
25
25
26
26 def read(size):
27 def read(size):
27 data = sys.stdin.read(size)
28 data = sys.stdin.read(size)
28 if not data:
29 if not data:
@@ -31,6 +32,7 b' def read(size):'
31 sys.stdout.flush()
32 sys.stdout.flush()
32 return data
33 return data
33
34
35
34 try:
36 try:
35 while True:
37 while True:
36 header = read(outputfmtsize)
38 header = read(outputfmtsize)
@@ -14,6 +14,7 b' from mercurial import ('
14 cmdtable = {}
14 cmdtable = {}
15 command = registrar.command(cmdtable)
15 command = registrar.command(cmdtable)
16
16
17
17 def pdb(ui, repo, msg, **opts):
18 def pdb(ui, repo, msg, **opts):
18 objects = {
19 objects = {
19 'mercurial': mercurial,
20 'mercurial': mercurial,
@@ -24,25 +25,25 b' def pdb(ui, repo, msg, **opts):'
24
25
25 code.interact(msg, local=objects)
26 code.interact(msg, local=objects)
26
27
28
27 def ipdb(ui, repo, msg, **opts):
29 def ipdb(ui, repo, msg, **opts):
28 import IPython
30 import IPython
29
31
30 cl = repo.changelog
32 cl = repo.changelog
31 mf = repo.manifestlog
33 mf = repo.manifestlog
32 cl, mf # use variables to appease pyflakes
34 cl, mf # use variables to appease pyflakes
33
35
34 IPython.embed()
36 IPython.embed()
35
37
38
36 @command(b'debugshell|dbsh', [])
39 @command(b'debugshell|dbsh', [])
37 def debugshell(ui, repo, **opts):
40 def debugshell(ui, repo, **opts):
38 bannermsg = ("loaded repo : %s\n"
41 bannermsg = "loaded repo : %s\n" "using source: %s" % (
39 "using source: %s" % (pycompat.sysstr(repo.root),
42 pycompat.sysstr(repo.root),
40 mercurial.__path__[0]))
43 mercurial.__path__[0],
44 )
41
45
42 pdbmap = {
46 pdbmap = {'pdb': 'code', 'ipdb': 'IPython'}
43 'pdb' : 'code',
44 'ipdb' : 'IPython'
45 }
46
47
47 debugger = ui.config(b"ui", b"debugger")
48 debugger = ui.config(b"ui", b"debugger")
48 if not debugger:
49 if not debugger:
@@ -55,8 +56,10 b' def debugshell(ui, repo, **opts):'
55 with demandimport.deactivated():
56 with demandimport.deactivated():
56 __import__(pdbmap[debugger])
57 __import__(pdbmap[debugger])
57 except ImportError:
58 except ImportError:
58 ui.warn((b"%s debugger specified but %s module was not found\n")
59 ui.warnnoi18n(
59 % (debugger, pdbmap[debugger]))
60 b"%s debugger specified but %s module was not found\n"
61 % (debugger, pdbmap[debugger])
62 )
60 debugger = b'pdb'
63 debugger = b'pdb'
61
64
62 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
65 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
@@ -13,6 +13,7 b' from mercurial import ('
13 extensions,
13 extensions,
14 )
14 )
15
15
16
16 def nonnormalentries(dmap):
17 def nonnormalentries(dmap):
17 """Compute nonnormal entries from dirstate's dmap"""
18 """Compute nonnormal entries from dirstate's dmap"""
18 res = set()
19 res = set()
@@ -21,6 +22,7 b' def nonnormalentries(dmap):'
21 res.add(f)
22 res.add(f)
22 return res
23 return res
23
24
25
24 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
26 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
25 """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
27 """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
26 nonnormalcomputedmap = nonnormalentries(dmap)
28 nonnormalcomputedmap = nonnormalentries(dmap)
@@ -30,15 +32,19 b' def checkconsistency(ui, orig, dmap, _no'
30 ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate')
32 ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate')
31 ui.develwarn(b"[map] %s\n" % nonnormalcomputedmap, config=b'dirstate')
33 ui.develwarn(b"[map] %s\n" % nonnormalcomputedmap, config=b'dirstate')
32
34
35
33 def _checkdirstate(orig, self, arg):
36 def _checkdirstate(orig, self, arg):
34 """Check nonnormal set consistency before and after the call to orig"""
37 """Check nonnormal set consistency before and after the call to orig"""
35 checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
38 checkconsistency(
36 b"before")
39 self._ui, orig, self._map, self._map.nonnormalset, b"before"
40 )
37 r = orig(self, arg)
41 r = orig(self, arg)
38 checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
42 checkconsistency(
39 b"after")
43 self._ui, orig, self._map, self._map.nonnormalset, b"after"
44 )
40 return r
45 return r
41
46
47
42 def extsetup(ui):
48 def extsetup(ui):
43 """Wrap functions modifying dirstate to check nonnormalset consistency"""
49 """Wrap functions modifying dirstate to check nonnormalset consistency"""
44 dirstatecl = dirstate.dirstate
50 dirstatecl = dirstate.dirstate
@@ -22,6 +22,7 b" def binopen(path, mode=b'rb'):"
22 if b'b' not in mode:
22 if b'b' not in mode:
23 mode = mode + b'b'
23 mode = mode + b'b'
24 return open(path, pycompat.sysstr(mode))
24 return open(path, pycompat.sysstr(mode))
25 binopen.options = {}
25
26
26 def printb(data, end=b'\n'):
27 def printb(data, end=b'\n'):
27 sys.stdout.flush()
28 sys.stdout.flush()
@@ -105,6 +105,33 b' revlog.o: ../../mercurial/cext/revlog.c'
105 -I../../mercurial \
105 -I../../mercurial \
106 -c -o revlog.o ../../mercurial/cext/revlog.c
106 -c -o revlog.o ../../mercurial/cext/revlog.c
107
107
108 dirs_fuzzer: dirs.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
109 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
110 -Wno-register -Wno-macro-redefined \
111 -I../../mercurial dirs.cc \
112 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
113 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
114 -o $$OUT/dirs_fuzzer
115
116 fncache_fuzzer: fncache.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
117 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
118 -Wno-register -Wno-macro-redefined \
119 -I../../mercurial fncache.cc \
120 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
121 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
122 -o $$OUT/fncache_fuzzer
123
124 jsonescapeu8fast_fuzzer: jsonescapeu8fast.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
125 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
126 -Wno-register -Wno-macro-redefined \
127 -I../../mercurial jsonescapeu8fast.cc \
128 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
129 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
130 -o $$OUT/jsonescapeu8fast_fuzzer
131
132 manifest_corpus.zip:
133 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
134
108 manifest_fuzzer: manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
135 manifest_fuzzer: manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
109 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
136 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
110 -Wno-register -Wno-macro-redefined \
137 -Wno-register -Wno-macro-redefined \
@@ -113,9 +140,6 b' manifest_fuzzer: manifest.cc manifest.o '
113 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
140 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
114 -o $$OUT/manifest_fuzzer
141 -o $$OUT/manifest_fuzzer
115
142
116 manifest_corpus.zip:
117 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
118
119 revlog_fuzzer: revlog.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
143 revlog_fuzzer: revlog.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
120 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
144 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
121 -Wno-register -Wno-macro-redefined \
145 -Wno-register -Wno-macro-redefined \
@@ -155,6 +179,6 b' clean:'
155 mpatch \
179 mpatch \
156 xdiff
180 xdiff
157
181
158 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer manifest_fuzzer manifest_corpus.zip revlog_fuzzer revlog_corpus.zip dirstate_fuzzer dirstate_corpus.zip fm1readmarkers_fuzzer fm1readmarkers_corpus.zip
182 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer dirs_fuzzer fncache_fuzzer jsonescapeu8fast_fuzzer manifest_fuzzer manifest_corpus.zip revlog_fuzzer revlog_corpus.zip dirstate_fuzzer dirstate_corpus.zip fm1readmarkers_fuzzer fm1readmarkers_corpus.zip
159
183
160 .PHONY: all clean oss-fuzz
184 .PHONY: all clean oss-fuzz
@@ -8,8 +8,7 b' ap = argparse.ArgumentParser()'
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
9 args = ap.parse_args()
9 args = ap.parse_args()
10
10
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
12 '..', '..'))
13 dirstate = os.path.join(reporoot, '.hg', 'dirstate')
12 dirstate = os.path.join(reporoot, '.hg', 'dirstate')
14
13
15 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
14 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
@@ -33,4 +33,6 b' with zipfile.ZipFile(args.out[0], "w", z'
33 'nhistedituserAugie Fackler <raf@durin42.com>\x00\x00\x00yA\xd7\x02'
33 'nhistedituserAugie Fackler <raf@durin42.com>\x00\x00\x00yA\xd7\x02'
34 'MtA\xd4\xe1\x01,\x00\x00\x01\x03\x03"\xa5\xcb\x86\xb6\xf4\xbaO\xa0'
34 'MtA\xd4\xe1\x01,\x00\x00\x01\x03\x03"\xa5\xcb\x86\xb6\xf4\xbaO\xa0'
35 'sH\xe7?\xcb\x9b\xc2n\xcfI\x9e\x14\xf0D\xf0!\x18DN\xcd\x97\x016\xa5'
35 'sH\xe7?\xcb\x9b\xc2n\xcfI\x9e\x14\xf0D\xf0!\x18DN\xcd\x97\x016\xa5'
36 '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'))
36 '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'
37 ),
38 )
@@ -8,8 +8,9 b' ap.add_argument("out", metavar="some.zip'
8 args = ap.parse_args()
8 args = ap.parse_args()
9
9
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
11 zf.writestr("manifest_zero",
11 zf.writestr(
12 '''PKG-INFO\09b3ed8f2b81095a13064402e930565f083346e9a
12 "manifest_zero",
13 '''PKG-INFO\09b3ed8f2b81095a13064402e930565f083346e9a
13 README\080b6e76643dcb44d4bc729e932fc464b3e36dbe3
14 README\080b6e76643dcb44d4bc729e932fc464b3e36dbe3
14 hg\0b6444347c629cc058d478023905cfb83b7f5bb9d
15 hg\0b6444347c629cc058d478023905cfb83b7f5bb9d
15 mercurial/__init__.py\0b80de5d138758541c5f05265ad144ab9fa86d1db
16 mercurial/__init__.py\0b80de5d138758541c5f05265ad144ab9fa86d1db
@@ -22,9 +23,11 b' mercurial/transaction.py\\09d180df101dc14'
22 notes.txt\0703afcec5edb749cf5cec67831f554d6da13f2fb
23 notes.txt\0703afcec5edb749cf5cec67831f554d6da13f2fb
23 setup.py\0ccf3f6daf0f13101ca73631f7a1769e328b472c9
24 setup.py\0ccf3f6daf0f13101ca73631f7a1769e328b472c9
24 tkmerge\03c922edb43a9c143682f7bc7b00f98b3c756ebe7
25 tkmerge\03c922edb43a9c143682f7bc7b00f98b3c756ebe7
25 ''')
26 ''',
26 zf.writestr("badmanifest_shorthashes",
27 )
27 "narf\0aa\nnarf2\0aaa\n")
28 zf.writestr("badmanifest_shorthashes", "narf\0aa\nnarf2\0aaa\n")
28 zf.writestr("badmanifest_nonull",
29 zf.writestr(
29 "narf\0cccccccccccccccccccccccccccccccccccccccc\n"
30 "badmanifest_nonull",
30 "narf2aaaaaaaaaaaaaaaaaaaa\n")
31 "narf\0cccccccccccccccccccccccccccccccccccccccc\n"
32 "narf2aaaaaaaaaaaaaaaaaaaa\n",
33 )
@@ -13,6 +13,7 b' ap = argparse.ArgumentParser()'
13 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
13 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
14 args = ap.parse_args()
14 args = ap.parse_args()
15
15
16
16 class deltafrag(object):
17 class deltafrag(object):
17 def __init__(self, start, end, data):
18 def __init__(self, start, end, data):
18 self.start = start
19 self.start = start
@@ -20,8 +21,11 b' class deltafrag(object):'
20 self.data = data
21 self.data = data
21
22
22 def __str__(self):
23 def __str__(self):
23 return struct.pack(
24 return (
24 ">lll", self.start, self.end, len(self.data)) + self.data
25 struct.pack(">lll", self.start, self.end, len(self.data))
26 + self.data
27 )
28
25
29
26 class delta(object):
30 class delta(object):
27 def __init__(self, frags):
31 def __init__(self, frags):
@@ -30,8 +34,8 b' class delta(object):'
30 def __str__(self):
34 def __str__(self):
31 return ''.join(str(f) for f in self.frags)
35 return ''.join(str(f) for f in self.frags)
32
36
37
33 class corpus(object):
38 class corpus(object):
34
35 def __init__(self, base, deltas):
39 def __init__(self, base, deltas):
36 self.base = base
40 self.base = base
37 self.deltas = deltas
41 self.deltas = deltas
@@ -49,19 +53,19 b' class corpus(object):'
49 )
53 )
50 return "".join(parts)
54 return "".join(parts)
51
55
56
52 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
57 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
53 # Manually constructed entries
58 # Manually constructed entries
54 zf.writestr(
59 zf.writestr(
55 "one_delta_applies",
60 "one_delta_applies", str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
56 str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
57 )
61 )
58 zf.writestr(
62 zf.writestr(
59 "one_delta_starts_late",
63 "one_delta_starts_late",
60 str(corpus('a', [delta([deltafrag(3, 1, 'b')])]))
64 str(corpus('a', [delta([deltafrag(3, 1, 'b')])])),
61 )
65 )
62 zf.writestr(
66 zf.writestr(
63 "one_delta_ends_late",
67 "one_delta_ends_late",
64 str(corpus('a', [delta([deltafrag(0, 20, 'b')])]))
68 str(corpus('a', [delta([deltafrag(0, 20, 'b')])])),
65 )
69 )
66
70
67 try:
71 try:
@@ -70,9 +74,8 b' with zipfile.ZipFile(args.out[0], "w", z'
70 fl = r.file('mercurial/manifest.py')
74 fl = r.file('mercurial/manifest.py')
71 rl = getattr(fl, '_revlog', fl)
75 rl = getattr(fl, '_revlog', fl)
72 bins = rl._chunks(rl._deltachain(10)[0])
76 bins = rl._chunks(rl._deltachain(10)[0])
73 zf.writestr('manifest_py_rev_10',
77 zf.writestr('manifest_py_rev_10', str(corpus(bins[0], bins[1:])))
74 str(corpus(bins[0], bins[1:])))
78 except: # skip this, so no re-raises
75 except: # skip this, so no re-raises
76 print('skipping seed file from repo data')
79 print('skipping seed file from repo data')
77 # Automatically discovered by running the fuzzer
80 # Automatically discovered by running the fuzzer
78 zf.writestr(
81 zf.writestr(
@@ -81,7 +84,8 b' with zipfile.ZipFile(args.out[0], "w", z'
81 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
84 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
82 zf.writestr(
85 zf.writestr(
83 "mpatch_ossfuzz_getbe32_ubsan",
86 "mpatch_ossfuzz_getbe32_ubsan",
84 "\x02\x00\x00\x00\x0c \xff\xff\xff\xff ")
87 "\x02\x00\x00\x00\x0c \xff\xff\xff\xff ",
88 )
85 zf.writestr(
89 zf.writestr(
86 "mpatch_apply_over_memcpy",
90 "mpatch_apply_over_memcpy",
87 '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
91 '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
@@ -342,4 +346,5 b' with zipfile.ZipFile(args.out[0], "w", z'
342 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
346 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
343 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
347 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
344 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
348 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
345 '\x00\x00\x00\x00')
349 '\x00\x00\x00\x00',
350 )
@@ -20,10 +20,15 b' for inline in (True, False):'
20 try:
20 try:
21 index, cache = parse_index2(data, inline)
21 index, cache = parse_index2(data, inline)
22 index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
22 index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
23 index.stats()
24 index.findsnapshots({}, 0)
25 10 in index
23 for rev in range(len(index)):
26 for rev in range(len(index)):
27 index.reachableroots(0, [len(index)-1], [rev])
24 node = index[rev][7]
28 node = index[rev][7]
25 partial = index.shortest(node)
29 partial = index.shortest(node)
26 index.partialmatch(node[:partial])
30 index.partialmatch(node[:partial])
31 index.deltachain(rev, None, True)
27 except Exception as e:
32 except Exception as e:
28 pass
33 pass
29 # uncomment this print if you're editing this Python code
34 # uncomment this print if you're editing this Python code
@@ -8,13 +8,13 b' ap = argparse.ArgumentParser()'
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
9 args = ap.parse_args()
9 args = ap.parse_args()
10
10
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
12 '..', '..'))
13 # typically a standalone index
12 # typically a standalone index
14 changelog = os.path.join(reporoot, '.hg', 'store', '00changelog.i')
13 changelog = os.path.join(reporoot, '.hg', 'store', '00changelog.i')
15 # an inline revlog with only a few revisions
14 # an inline revlog with only a few revisions
16 contributing = os.path.join(
15 contributing = os.path.join(
17 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i')
16 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i'
17 )
18
18
19 print(changelog, os.path.exists(changelog))
19 print(changelog, os.path.exists(changelog))
20 print(contributing, os.path.exists(contributing))
20 print(contributing, os.path.exists(contributing))
@@ -2,27 +2,30 b''
2 from __future__ import absolute_import, print_function
2 from __future__ import absolute_import, print_function
3
3
4 import argparse
4 import argparse
5 import json
6 import os
5 import os
7 import subprocess
6 import subprocess
8 import sys
7 import sys
9
8
10 # Always load hg libraries from the hg we can find on $PATH.
9 # Always load hg libraries from the hg we can find on $PATH.
11 hglib = json.loads(subprocess.check_output(
10 hglib = subprocess.check_output(['hg', 'debuginstall', '-T', '{hgmodules}'])
12 ['hg', 'debuginstall', '-Tjson']))[0]['hgmodules']
13 sys.path.insert(0, os.path.dirname(hglib))
11 sys.path.insert(0, os.path.dirname(hglib))
14
12
15 from mercurial import util
13 from mercurial import util
16
14
17 ap = argparse.ArgumentParser()
15 ap = argparse.ArgumentParser()
18 ap.add_argument('--paranoid',
16 ap.add_argument(
19 action='store_true',
17 '--paranoid',
20 help=("Be paranoid about how version numbers compare and "
18 action='store_true',
21 "produce something that's more likely to sort "
19 help=(
22 "reasonably."))
20 "Be paranoid about how version numbers compare and "
21 "produce something that's more likely to sort "
22 "reasonably."
23 ),
24 )
23 ap.add_argument('--selftest', action='store_true', help='Run self-tests.')
25 ap.add_argument('--selftest', action='store_true', help='Run self-tests.')
24 ap.add_argument('versionfile', help='Path to a valid mercurial __version__.py')
26 ap.add_argument('versionfile', help='Path to a valid mercurial __version__.py')
25
27
28
26 def paranoidver(ver):
29 def paranoidver(ver):
27 """Given an hg version produce something that distutils can sort.
30 """Given an hg version produce something that distutils can sort.
28
31
@@ -109,22 +112,25 b' def paranoidver(ver):'
109 extra = ''
112 extra = ''
110 return '%d.%d.%d%s' % (major, minor, micro, extra)
113 return '%d.%d.%d%s' % (major, minor, micro, extra)
111
114
115
112 def main(argv):
116 def main(argv):
113 opts = ap.parse_args(argv[1:])
117 opts = ap.parse_args(argv[1:])
114 if opts.selftest:
118 if opts.selftest:
115 import doctest
119 import doctest
120
116 doctest.testmod()
121 doctest.testmod()
117 return
122 return
118 with open(opts.versionfile) as f:
123 with open(opts.versionfile) as f:
119 for l in f:
124 for l in f:
120 if l.startswith('version = b'):
125 if l.startswith('version = b'):
121 # version number is entire line minus the quotes
126 # version number is entire line minus the quotes
122 ver = l[len('version = b') + 1:-2]
127 ver = l[len('version = b') + 1 : -2]
123 break
128 break
124 if opts.paranoid:
129 if opts.paranoid:
125 print(paranoidver(ver))
130 print(paranoidver(ver))
126 else:
131 else:
127 print(ver)
132 print(ver)
128
133
134
129 if __name__ == '__main__':
135 if __name__ == '__main__':
130 main(sys.argv)
136 main(sys.argv)
@@ -16,17 +16,22 b' if sys.version_info[0] >= 3:'
16 stdout = sys.stdout.buffer
16 stdout = sys.stdout.buffer
17 stderr = sys.stderr.buffer
17 stderr = sys.stderr.buffer
18 stringio = io.BytesIO
18 stringio = io.BytesIO
19
19 def bprint(*args):
20 def bprint(*args):
20 # remove b'' as well for ease of test migration
21 # remove b'' as well for ease of test migration
21 pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
22 pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
22 stdout.write(b' '.join(pargs) + b'\n')
23 stdout.write(b' '.join(pargs) + b'\n')
24
25
23 else:
26 else:
24 import cStringIO
27 import cStringIO
28
25 stdout = sys.stdout
29 stdout = sys.stdout
26 stderr = sys.stderr
30 stderr = sys.stderr
27 stringio = cStringIO.StringIO
31 stringio = cStringIO.StringIO
28 bprint = print
32 bprint = print
29
33
34
30 def connectpipe(path=None, extraargs=()):
35 def connectpipe(path=None, extraargs=()):
31 cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
36 cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
32 if path:
37 if path:
@@ -38,11 +43,13 b' def connectpipe(path=None, extraargs=())'
38 return cmdline
43 return cmdline
39 return [arg.decode("utf-8") for arg in cmdline]
44 return [arg.decode("utf-8") for arg in cmdline]
40
45
41 server = subprocess.Popen(tonative(cmdline), stdin=subprocess.PIPE,
46 server = subprocess.Popen(
42 stdout=subprocess.PIPE)
47 tonative(cmdline), stdin=subprocess.PIPE, stdout=subprocess.PIPE
48 )
43
49
44 return server
50 return server
45
51
52
46 class unixconnection(object):
53 class unixconnection(object):
47 def __init__(self, sockpath):
54 def __init__(self, sockpath):
48 self.sock = sock = socket.socket(socket.AF_UNIX)
55 self.sock = sock = socket.socket(socket.AF_UNIX)
@@ -55,6 +62,7 b' class unixconnection(object):'
55 self.stdout.close()
62 self.stdout.close()
56 self.sock.close()
63 self.sock.close()
57
64
65
58 class unixserver(object):
66 class unixserver(object):
59 def __init__(self, sockpath, logpath=None, repopath=None):
67 def __init__(self, sockpath, logpath=None, repopath=None):
60 self.sockpath = sockpath
68 self.sockpath = sockpath
@@ -80,11 +88,13 b' class unixserver(object):'
80 os.kill(self.server.pid, signal.SIGTERM)
88 os.kill(self.server.pid, signal.SIGTERM)
81 self.server.wait()
89 self.server.wait()
82
90
91
83 def writeblock(server, data):
92 def writeblock(server, data):
84 server.stdin.write(struct.pack(b'>I', len(data)))
93 server.stdin.write(struct.pack(b'>I', len(data)))
85 server.stdin.write(data)
94 server.stdin.write(data)
86 server.stdin.flush()
95 server.stdin.flush()
87
96
97
88 def readchannel(server):
98 def readchannel(server):
89 data = server.stdout.read(5)
99 data = server.stdout.read(5)
90 if not data:
100 if not data:
@@ -95,11 +105,14 b' def readchannel(server):'
95 else:
105 else:
96 return channel, server.stdout.read(length)
106 return channel, server.stdout.read(length)
97
107
108
98 def sep(text):
109 def sep(text):
99 return text.replace(b'\\', b'/')
110 return text.replace(b'\\', b'/')
100
111
101 def runcommand(server, args, output=stdout, error=stderr, input=None,
112
102 outfilter=lambda x: x):
113 def runcommand(
114 server, args, output=stdout, error=stderr, input=None, outfilter=lambda x: x
115 ):
103 bprint(b'*** runcommand', b' '.join(args))
116 bprint(b'*** runcommand', b' '.join(args))
104 stdout.flush()
117 stdout.flush()
105 server.stdin.write(b'runcommand\n')
118 server.stdin.write(b'runcommand\n')
@@ -123,7 +136,7 b' def runcommand(server, args, output=stdo'
123 elif ch == b'm':
136 elif ch == b'm':
124 bprint(b"message: %r" % data)
137 bprint(b"message: %r" % data)
125 elif ch == b'r':
138 elif ch == b'r':
126 ret, = struct.unpack('>i', data)
139 (ret,) = struct.unpack('>i', data)
127 if ret != 0:
140 if ret != 0:
128 bprint(b' [%d]' % ret)
141 bprint(b' [%d]' % ret)
129 return ret
142 return ret
@@ -132,6 +145,7 b' def runcommand(server, args, output=stdo'
132 if ch.isupper():
145 if ch.isupper():
133 return
146 return
134
147
148
135 def check(func, connect=connectpipe):
149 def check(func, connect=connectpipe):
136 stdout.flush()
150 stdout.flush()
137 server = connect()
151 server = connect()
@@ -141,7 +155,9 b' def check(func, connect=connectpipe):'
141 server.stdin.close()
155 server.stdin.close()
142 server.wait()
156 server.wait()
143
157
158
144 def checkwith(connect=connectpipe, **kwargs):
159 def checkwith(connect=connectpipe, **kwargs):
145 def wrap(func):
160 def wrap(func):
146 return check(func, lambda: connect(**kwargs))
161 return check(func, lambda: connect(**kwargs))
162
147 return wrap
163 return wrap
@@ -10,7 +10,7 b' import sys'
10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
11 # to work when run from a virtualenv. The modules were chosen empirically
11 # to work when run from a virtualenv. The modules were chosen empirically
12 # so that the return value matches the return value without virtualenv.
12 # so that the return value matches the return value without virtualenv.
13 if True: # disable lexical sorting checks
13 if True: # disable lexical sorting checks
14 try:
14 try:
15 import BaseHTTPServer as basehttpserver
15 import BaseHTTPServer as basehttpserver
16 except ImportError:
16 except ImportError:
@@ -28,9 +28,12 b' allowsymbolimports = ('
28 'mercurial.hgweb.common',
28 'mercurial.hgweb.common',
29 'mercurial.hgweb.request',
29 'mercurial.hgweb.request',
30 'mercurial.i18n',
30 'mercurial.i18n',
31 'mercurial.interfaces',
31 'mercurial.node',
32 'mercurial.node',
33 'mercurial.pycompat',
32 # for revlog to re-export constant to extensions
34 # for revlog to re-export constant to extensions
33 'mercurial.revlogutils.constants',
35 'mercurial.revlogutils.constants',
36 'mercurial.revlogutils.flagutil',
34 # for cffi modules to re-export pure functions
37 # for cffi modules to re-export pure functions
35 'mercurial.pure.base85',
38 'mercurial.pure.base85',
36 'mercurial.pure.bdiff',
39 'mercurial.pure.bdiff',
@@ -45,9 +48,7 b' allowsymbolimports = ('
45 )
48 )
46
49
47 # Whitelist of symbols that can be directly imported.
50 # Whitelist of symbols that can be directly imported.
48 directsymbols = (
51 directsymbols = ('demandimport',)
49 'demandimport',
50 )
51
52
52 # Modules that must be aliased because they are commonly confused with
53 # Modules that must be aliased because they are commonly confused with
53 # common variables and can create aliasing and readability issues.
54 # common variables and can create aliasing and readability issues.
@@ -55,6 +56,7 b' requirealias = {'
55 'ui': 'uimod',
56 'ui': 'uimod',
56 }
57 }
57
58
59
58 def usingabsolute(root):
60 def usingabsolute(root):
59 """Whether absolute imports are being used."""
61 """Whether absolute imports are being used."""
60 if sys.version_info[0] >= 3:
62 if sys.version_info[0] >= 3:
@@ -69,6 +71,7 b' def usingabsolute(root):'
69
71
70 return False
72 return False
71
73
74
72 def walklocal(root):
75 def walklocal(root):
73 """Recursively yield all descendant nodes but not in a different scope"""
76 """Recursively yield all descendant nodes but not in a different scope"""
74 todo = collections.deque(ast.iter_child_nodes(root))
77 todo = collections.deque(ast.iter_child_nodes(root))
@@ -80,6 +83,7 b' def walklocal(root):'
80 todo.extend(ast.iter_child_nodes(node))
83 todo.extend(ast.iter_child_nodes(node))
81 yield node, newscope
84 yield node, newscope
82
85
86
83 def dotted_name_of_path(path):
87 def dotted_name_of_path(path):
84 """Given a relative path to a source file, return its dotted module name.
88 """Given a relative path to a source file, return its dotted module name.
85
89
@@ -89,11 +93,12 b' def dotted_name_of_path(path):'
89 'zlib'
93 'zlib'
90 """
94 """
91 parts = path.replace(os.sep, '/').split('/')
95 parts = path.replace(os.sep, '/').split('/')
92 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
96 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
93 if parts[-1].endswith('module'):
97 if parts[-1].endswith('module'):
94 parts[-1] = parts[-1][:-6]
98 parts[-1] = parts[-1][:-6]
95 return '.'.join(parts)
99 return '.'.join(parts)
96
100
101
97 def fromlocalfunc(modulename, localmods):
102 def fromlocalfunc(modulename, localmods):
98 """Get a function to examine which locally defined module the
103 """Get a function to examine which locally defined module the
99 target source imports via a specified name.
104 target source imports via a specified name.
@@ -162,6 +167,7 b' def fromlocalfunc(modulename, localmods)'
162 prefix = '.'.join(modulename.split('.')[:-1])
167 prefix = '.'.join(modulename.split('.')[:-1])
163 if prefix:
168 if prefix:
164 prefix += '.'
169 prefix += '.'
170
165 def fromlocal(name, level=0):
171 def fromlocal(name, level=0):
166 # name is false value when relative imports are used.
172 # name is false value when relative imports are used.
167 if not name:
173 if not name:
@@ -173,8 +179,9 b' def fromlocalfunc(modulename, localmods)'
173 # Check relative name first.
179 # Check relative name first.
174 candidates = [prefix + name, name]
180 candidates = [prefix + name, name]
175 else:
181 else:
176 candidates = ['.'.join(modulename.split('.')[:-level]) +
182 candidates = [
177 '.' + name]
183 '.'.join(modulename.split('.')[:-level]) + '.' + name
184 ]
178
185
179 for n in candidates:
186 for n in candidates:
180 if n in localmods:
187 if n in localmods:
@@ -183,18 +190,21 b' def fromlocalfunc(modulename, localmods)'
183 if dottedpath in localmods:
190 if dottedpath in localmods:
184 return (n, dottedpath, True)
191 return (n, dottedpath, True)
185 return False
192 return False
193
186 return fromlocal
194 return fromlocal
187
195
196
188 def populateextmods(localmods):
197 def populateextmods(localmods):
189 """Populate C extension modules based on pure modules"""
198 """Populate C extension modules based on pure modules"""
190 newlocalmods = set(localmods)
199 newlocalmods = set(localmods)
191 for n in localmods:
200 for n in localmods:
192 if n.startswith('mercurial.pure.'):
201 if n.startswith('mercurial.pure.'):
193 m = n[len('mercurial.pure.'):]
202 m = n[len('mercurial.pure.') :]
194 newlocalmods.add('mercurial.cext.' + m)
203 newlocalmods.add('mercurial.cext.' + m)
195 newlocalmods.add('mercurial.cffi._' + m)
204 newlocalmods.add('mercurial.cffi._' + m)
196 return newlocalmods
205 return newlocalmods
197
206
207
198 def list_stdlib_modules():
208 def list_stdlib_modules():
199 """List the modules present in the stdlib.
209 """List the modules present in the stdlib.
200
210
@@ -230,13 +240,13 b' def list_stdlib_modules():'
230 for m in ['msvcrt', '_winreg']:
240 for m in ['msvcrt', '_winreg']:
231 yield m
241 yield m
232 yield '__builtin__'
242 yield '__builtin__'
233 yield 'builtins' # python3 only
243 yield 'builtins' # python3 only
234 yield 'importlib.abc' # python3 only
244 yield 'importlib.abc' # python3 only
235 yield 'importlib.machinery' # python3 only
245 yield 'importlib.machinery' # python3 only
236 yield 'importlib.util' # python3 only
246 yield 'importlib.util' # python3 only
237 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
247 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
238 yield m
248 yield m
239 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
249 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
240 yield m
250 yield m
241 for m in ['cffi']:
251 for m in ['cffi']:
242 yield m
252 yield m
@@ -262,14 +272,17 b' def list_stdlib_modules():'
262 for libpath in sys.path:
272 for libpath in sys.path:
263 # We want to walk everything in sys.path that starts with something in
273 # We want to walk everything in sys.path that starts with something in
264 # stdlib_prefixes, but not directories from the hg sources.
274 # stdlib_prefixes, but not directories from the hg sources.
265 if (os.path.abspath(libpath).startswith(sourceroot)
275 if os.path.abspath(libpath).startswith(sourceroot) or not any(
266 or not any(libpath.startswith(p) for p in stdlib_prefixes)):
276 libpath.startswith(p) for p in stdlib_prefixes
277 ):
267 continue
278 continue
268 for top, dirs, files in os.walk(libpath):
279 for top, dirs, files in os.walk(libpath):
269 for i, d in reversed(list(enumerate(dirs))):
280 for i, d in reversed(list(enumerate(dirs))):
270 if (not os.path.exists(os.path.join(top, d, '__init__.py'))
281 if (
271 or top == libpath and d in ('hgdemandimport', 'hgext',
282 not os.path.exists(os.path.join(top, d, '__init__.py'))
272 'mercurial')):
283 or top == libpath
284 and d in ('hgdemandimport', 'hgext', 'mercurial')
285 ):
273 del dirs[i]
286 del dirs[i]
274 for name in files:
287 for name in files:
275 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
288 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
@@ -278,12 +291,14 b' def list_stdlib_modules():'
278 full_path = top
291 full_path = top
279 else:
292 else:
280 full_path = os.path.join(top, name)
293 full_path = os.path.join(top, name)
281 rel_path = full_path[len(libpath) + 1:]
294 rel_path = full_path[len(libpath) + 1 :]
282 mod = dotted_name_of_path(rel_path)
295 mod = dotted_name_of_path(rel_path)
283 yield mod
296 yield mod
284
297
298
285 stdlib_modules = set(list_stdlib_modules())
299 stdlib_modules = set(list_stdlib_modules())
286
300
301
287 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
302 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
288 """Given the source of a file as a string, yield the names
303 """Given the source of a file as a string, yield the names
289 imported by that file.
304 imported by that file.
@@ -381,6 +396,7 b' def imported_modules(source, modulename,'
381 # lookup
396 # lookup
382 yield dottedpath
397 yield dottedpath
383
398
399
384 def verify_import_convention(module, source, localmods):
400 def verify_import_convention(module, source, localmods):
385 """Verify imports match our established coding convention.
401 """Verify imports match our established coding convention.
386
402
@@ -398,6 +414,7 b' def verify_import_convention(module, sou'
398 else:
414 else:
399 return verify_stdlib_on_own_line(root)
415 return verify_stdlib_on_own_line(root)
400
416
417
401 def verify_modern_convention(module, root, localmods, root_col_offset=0):
418 def verify_modern_convention(module, root, localmods, root_col_offset=0):
402 """Verify a file conforms to the modern import convention rules.
419 """Verify a file conforms to the modern import convention rules.
403
420
@@ -441,19 +458,24 b' def verify_modern_convention(module, roo'
441 seenlevels = set()
458 seenlevels = set()
442
459
443 for node, newscope in walklocal(root):
460 for node, newscope in walklocal(root):
461
444 def msg(fmt, *args):
462 def msg(fmt, *args):
445 return (fmt % args, node.lineno)
463 return (fmt % args, node.lineno)
464
446 if newscope:
465 if newscope:
447 # Check for local imports in function
466 # Check for local imports in function
448 for r in verify_modern_convention(module, node, localmods,
467 for r in verify_modern_convention(
449 node.col_offset + 4):
468 module, node, localmods, node.col_offset + 4
469 ):
450 yield r
470 yield r
451 elif isinstance(node, ast.Import):
471 elif isinstance(node, ast.Import):
452 # Disallow "import foo, bar" and require separate imports
472 # Disallow "import foo, bar" and require separate imports
453 # for each module.
473 # for each module.
454 if len(node.names) > 1:
474 if len(node.names) > 1:
455 yield msg('multiple imported names: %s',
475 yield msg(
456 ', '.join(n.name for n in node.names))
476 'multiple imported names: %s',
477 ', '.join(n.name for n in node.names),
478 )
457
479
458 name = node.names[0].name
480 name = node.names[0].name
459 asname = node.names[0].asname
481 asname = node.names[0].asname
@@ -463,16 +485,20 b' def verify_modern_convention(module, roo'
463 # Ignore sorting rules on imports inside blocks.
485 # Ignore sorting rules on imports inside blocks.
464 if node.col_offset == root_col_offset:
486 if node.col_offset == root_col_offset:
465 if lastname and name < lastname and laststdlib == stdlib:
487 if lastname and name < lastname and laststdlib == stdlib:
466 yield msg('imports not lexically sorted: %s < %s',
488 yield msg(
467 name, lastname)
489 'imports not lexically sorted: %s < %s', name, lastname
490 )
468
491
469 lastname = name
492 lastname = name
470 laststdlib = stdlib
493 laststdlib = stdlib
471
494
472 # stdlib imports should be before local imports.
495 # stdlib imports should be before local imports.
473 if stdlib and seenlocal and node.col_offset == root_col_offset:
496 if stdlib and seenlocal and node.col_offset == root_col_offset:
474 yield msg('stdlib import "%s" follows local import: %s',
497 yield msg(
475 name, seenlocal)
498 'stdlib import "%s" follows local import: %s',
499 name,
500 seenlocal,
501 )
476
502
477 if not stdlib:
503 if not stdlib:
478 seenlocal = name
504 seenlocal = name
@@ -483,13 +509,16 b' def verify_modern_convention(module, roo'
483 yield msg('import should be relative: %s', name)
509 yield msg('import should be relative: %s', name)
484
510
485 if name in requirealias and asname != requirealias[name]:
511 if name in requirealias and asname != requirealias[name]:
486 yield msg('%s module must be "as" aliased to %s',
512 yield msg(
487 name, requirealias[name])
513 '%s module must be "as" aliased to %s',
514 name,
515 requirealias[name],
516 )
488
517
489 elif isinstance(node, ast.ImportFrom):
518 elif isinstance(node, ast.ImportFrom):
490 # Resolve the full imported module name.
519 # Resolve the full imported module name.
491 if node.level > 0:
520 if node.level > 0:
492 fullname = '.'.join(module.split('.')[:-node.level])
521 fullname = '.'.join(module.split('.')[: -node.level])
493 if node.module:
522 if node.module:
494 fullname += '.%s' % node.module
523 fullname += '.%s' % node.module
495 else:
524 else:
@@ -506,7 +535,8 b' def verify_modern_convention(module, roo'
506 if not fullname or (
535 if not fullname or (
507 fullname in stdlib_modules
536 fullname in stdlib_modules
508 and fullname not in localmods
537 and fullname not in localmods
509 and fullname + '.__init__' not in localmods):
538 and fullname + '.__init__' not in localmods
539 ):
510 yield msg('relative import of stdlib module')
540 yield msg('relative import of stdlib module')
511 else:
541 else:
512 seenlocal = fullname
542 seenlocal = fullname
@@ -516,19 +546,24 b' def verify_modern_convention(module, roo'
516 found = fromlocal(node.module, node.level)
546 found = fromlocal(node.module, node.level)
517 if found and found[2]: # node.module is a package
547 if found and found[2]: # node.module is a package
518 prefix = found[0] + '.'
548 prefix = found[0] + '.'
519 symbols = (n.name for n in node.names
549 symbols = (
520 if not fromlocal(prefix + n.name))
550 n.name for n in node.names if not fromlocal(prefix + n.name)
551 )
521 else:
552 else:
522 symbols = (n.name for n in node.names)
553 symbols = (n.name for n in node.names)
523 symbols = [sym for sym in symbols if sym not in directsymbols]
554 symbols = [sym for sym in symbols if sym not in directsymbols]
524 if node.module and node.col_offset == root_col_offset:
555 if node.module and node.col_offset == root_col_offset:
525 if symbols and fullname not in allowsymbolimports:
556 if symbols and fullname not in allowsymbolimports:
526 yield msg('direct symbol import %s from %s',
557 yield msg(
527 ', '.join(symbols), fullname)
558 'direct symbol import %s from %s',
559 ', '.join(symbols),
560 fullname,
561 )
528
562
529 if symbols and seennonsymbollocal:
563 if symbols and seennonsymbollocal:
530 yield msg('symbol import follows non-symbol import: %s',
564 yield msg(
531 fullname)
565 'symbol import follows non-symbol import: %s', fullname
566 )
532 if not symbols and fullname not in stdlib_modules:
567 if not symbols and fullname not in stdlib_modules:
533 seennonsymbollocal = True
568 seennonsymbollocal = True
534
569
@@ -536,15 +571,19 b' def verify_modern_convention(module, roo'
536 assert node.level
571 assert node.level
537
572
538 # Only allow 1 group per level.
573 # Only allow 1 group per level.
539 if (node.level in seenlevels
574 if (
540 and node.col_offset == root_col_offset):
575 node.level in seenlevels
541 yield msg('multiple "from %s import" statements',
576 and node.col_offset == root_col_offset
542 '.' * node.level)
577 ):
578 yield msg(
579 'multiple "from %s import" statements', '.' * node.level
580 )
543
581
544 # Higher-level groups come before lower-level groups.
582 # Higher-level groups come before lower-level groups.
545 if any(node.level > l for l in seenlevels):
583 if any(node.level > l for l in seenlevels):
546 yield msg('higher-level import should come first: %s',
584 yield msg(
547 fullname)
585 'higher-level import should come first: %s', fullname
586 )
548
587
549 seenlevels.add(node.level)
588 seenlevels.add(node.level)
550
589
@@ -554,14 +593,23 b' def verify_modern_convention(module, roo'
554
593
555 for n in node.names:
594 for n in node.names:
556 if lastentryname and n.name < lastentryname:
595 if lastentryname and n.name < lastentryname:
557 yield msg('imports from %s not lexically sorted: %s < %s',
596 yield msg(
558 fullname, n.name, lastentryname)
597 'imports from %s not lexically sorted: %s < %s',
598 fullname,
599 n.name,
600 lastentryname,
601 )
559
602
560 lastentryname = n.name
603 lastentryname = n.name
561
604
562 if n.name in requirealias and n.asname != requirealias[n.name]:
605 if n.name in requirealias and n.asname != requirealias[n.name]:
563 yield msg('%s from %s must be "as" aliased to %s',
606 yield msg(
564 n.name, fullname, requirealias[n.name])
607 '%s from %s must be "as" aliased to %s',
608 n.name,
609 fullname,
610 requirealias[n.name],
611 )
612
565
613
566 def verify_stdlib_on_own_line(root):
614 def verify_stdlib_on_own_line(root):
567 """Given some python source, verify that stdlib imports are done
615 """Given some python source, verify that stdlib imports are done
@@ -580,13 +628,20 b' def verify_stdlib_on_own_line(root):'
580 for n in node.names:
628 for n in node.names:
581 from_stdlib[n.name in stdlib_modules].append(n.name)
629 from_stdlib[n.name in stdlib_modules].append(n.name)
582 if from_stdlib[True] and from_stdlib[False]:
630 if from_stdlib[True] and from_stdlib[False]:
583 yield ('mixed imports\n stdlib: %s\n relative: %s' %
631 yield (
584 (', '.join(sorted(from_stdlib[True])),
632 'mixed imports\n stdlib: %s\n relative: %s'
585 ', '.join(sorted(from_stdlib[False]))), node.lineno)
633 % (
634 ', '.join(sorted(from_stdlib[True])),
635 ', '.join(sorted(from_stdlib[False])),
636 ),
637 node.lineno,
638 )
639
586
640
587 class CircularImport(Exception):
641 class CircularImport(Exception):
588 pass
642 pass
589
643
644
590 def checkmod(mod, imports):
645 def checkmod(mod, imports):
591 shortest = {}
646 shortest = {}
592 visit = [[mod]]
647 visit = [[mod]]
@@ -601,6 +656,7 b' def checkmod(mod, imports):'
601 continue
656 continue
602 visit.append(path + [i])
657 visit.append(path + [i])
603
658
659
604 def rotatecycle(cycle):
660 def rotatecycle(cycle):
605 """arrange a cycle so that the lexicographically first module listed first
661 """arrange a cycle so that the lexicographically first module listed first
606
662
@@ -611,6 +667,7 b' def rotatecycle(cycle):'
611 idx = cycle.index(lowest)
667 idx = cycle.index(lowest)
612 return cycle[idx:] + cycle[:idx] + [lowest]
668 return cycle[idx:] + cycle[:idx] + [lowest]
613
669
670
614 def find_cycles(imports):
671 def find_cycles(imports):
615 """Find cycles in an already-loaded import graph.
672 """Find cycles in an already-loaded import graph.
616
673
@@ -634,9 +691,11 b' def find_cycles(imports):'
634 cycles.add(" -> ".join(rotatecycle(cycle)))
691 cycles.add(" -> ".join(rotatecycle(cycle)))
635 return cycles
692 return cycles
636
693
694
637 def _cycle_sortkey(c):
695 def _cycle_sortkey(c):
638 return len(c), c
696 return len(c), c
639
697
698
640 def embedded(f, modname, src):
699 def embedded(f, modname, src):
641 """Extract embedded python code
700 """Extract embedded python code
642
701
@@ -678,6 +737,7 b' def embedded(f, modname, src):'
678 modname = modname.decode('utf8')
737 modname = modname.decode('utf8')
679 yield code, "%s[%d]" % (modname, starts), name, starts - 1
738 yield code, "%s[%d]" % (modname, starts), name, starts - 1
680
739
740
681 def sources(f, modname):
741 def sources(f, modname):
682 """Yields possibly multiple sources from a filepath
742 """Yields possibly multiple sources from a filepath
683
743
@@ -698,6 +758,7 b' def sources(f, modname):'
698 for script, modname, t, line in embedded(f, modname, src):
758 for script, modname, t, line in embedded(f, modname, src):
699 yield script, modname.encode('utf8'), t, line
759 yield script, modname.encode('utf8'), t, line
700
760
761
701 def main(argv):
762 def main(argv):
702 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
763 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
703 print('Usage: %s {-|file [file] [file] ...}')
764 print('Usage: %s {-|file [file] [file] ...}')
@@ -719,15 +780,19 b' def main(argv):'
719 for src, modname, name, line in sources(source_path, localmodname):
780 for src, modname, name, line in sources(source_path, localmodname):
720 try:
781 try:
721 used_imports[modname] = sorted(
782 used_imports[modname] = sorted(
722 imported_modules(src, modname, name, localmods,
783 imported_modules(
723 ignore_nested=True))
784 src, modname, name, localmods, ignore_nested=True
724 for error, lineno in verify_import_convention(modname, src,
785 )
725 localmods):
786 )
787 for error, lineno in verify_import_convention(
788 modname, src, localmods
789 ):
726 any_errors = True
790 any_errors = True
727 print('%s:%d: %s' % (source_path, lineno + line, error))
791 print('%s:%d: %s' % (source_path, lineno + line, error))
728 except SyntaxError as e:
792 except SyntaxError as e:
729 print('%s:%d: SyntaxError: %s' %
793 print(
730 (source_path, e.lineno + line, e))
794 '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e)
795 )
731 cycles = find_cycles(used_imports)
796 cycles = find_cycles(used_imports)
732 if cycles:
797 if cycles:
733 firstmods = set()
798 firstmods = set()
@@ -743,5 +808,6 b' def main(argv):'
743 any_errors = True
808 any_errors = True
744 return any_errors != 0
809 return any_errors != 0
745
810
811
746 if __name__ == '__main__':
812 if __name__ == '__main__':
747 sys.exit(int(main(sys.argv)))
813 sys.exit(int(main(sys.argv)))
@@ -37,22 +37,22 b''
37 $PYTHON36_x64_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
37 $PYTHON36_x64_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
38 $PYTHON36_x64_SHA256 = "96088A58B7C43BC83B84E6B67F15E8706C614023DD64F9A5A14E81FF824ADADC"
38 $PYTHON36_x64_SHA256 = "96088A58B7C43BC83B84E6B67F15E8706C614023DD64F9A5A14E81FF824ADADC"
39
39
40 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.2/python-3.7.2.exe"
40 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.4/python-3.7.4.exe"
41 $PYTHON37_x86_SHA256 = "8BACE330FB409E428B04EEEE083DD9CA7F6C754366D07E23B3853891D8F8C3D0"
41 $PYTHON37_x86_SHA256 = "9a30ab5568ba37bfbcae5cdee19e9dc30765c42cf066f605221563ff8b20ee34"
42 $PYTHON37_x64_URL = "https://www.python.org/ftp/python/3.7.2/python-3.7.2-amd64.exe"
42 $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.4/python-3.7.4-amd64.exe"
43 $PYTHON37_x64_SHA256 = "0FE2A696F5A3E481FED795EF6896ED99157BCEF273EF3C4A96F2905CBDB3AA13"
43 $PYTHON37_x64_SHA256 = "bab92f987320975c7826171a072bfd64f8f0941aaf2cdeba6924b7025c9968a3"
44
44
45 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0b2.exe"
45 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0.exe"
46 $PYTHON38_x86_SHA256 = "efa37ff7a239332bd5cf8b6e6ff15e3f183da942fd8c8d3e4b6bd11fa5e07e23"
46 $PYTHON38_x86_SHA256 = "b471908de5e10d8fb5c3351a5affb1172da7790c533e0c9ffbaeec9c11611b15"
47 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0b2-amd64.exe"
47 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0-amd64.exe"
48 $PYTHON38_x64_SHA256 = "4e151f7dfa3605e6f400a3b01acfc2517468d71afb1e20f9299149356b79d8e9"
48 $PYTHON38_x64_SHA256 = "a9bbc6088a3e4c7112826e21bfee6277f7b6d93259f7c57176139231bb7071e4"
49
49
50 # PIP 19.0.3.
50 # PIP 19.2.3.
51 $PIP_URL = "https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py"
51 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
52 $PIP_SHA256 = "efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61"
52 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
53
53
54 $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/virtualenv-16.4.3.tar.gz"
54 $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/virtualenv-16.7.5.tar.gz"
55 $VIRTUALENV_SHA256 = "984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39"
55 $VIRTUALENV_SHA256 = "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2"
56
56
57 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
57 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
58 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
58 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
@@ -60,9 +60,9 b''
60 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
60 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
61 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
61 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
62
62
63 $MERCURIAL_WHEEL_FILENAME = "mercurial-4.9-cp27-cp27m-win_amd64.whl"
63 $MERCURIAL_WHEEL_FILENAME = "mercurial-5.1.2-cp27-cp27m-win_amd64.whl"
64 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/fe/e8/b872d53dfbbf986bdc46af0b30f580b227fb59bddd2587152a55e205b0cc/$MERCURIAL_WHEEL_FILENAME"
64 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/6d/47/e031e47f7fe9b16e4e3383da47e2b0a7eae6e603996bc67a03ec4fa1b3f4/$MERCURIAL_WHEEL_FILENAME"
65 $MERCURIAL_WHEEL_SHA256 = "218cc2e7c3f1d535007febbb03351663897edf27df0e57d6842e3b686492b429"
65 $MERCURIAL_WHEEL_SHA256 = "1d18c7f6ca1456f0f62ee65c9a50c14cbba48ce6e924930cdb10537f5c9eaf5f"
66
66
67 # Writing progress slows down downloads substantially. So disable it.
67 # Writing progress slows down downloads substantially. So disable it.
68 $progressPreference = 'silentlyContinue'
68 $progressPreference = 'silentlyContinue'
@@ -13,6 +13,7 b' prints it to ``stderr`` on exit.'
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16
16 def memusage(ui):
17 def memusage(ui):
17 """Report memory usage of the current process."""
18 """Report memory usage of the current process."""
18 result = {'peak': 0, 'rss': 0}
19 result = {'peak': 0, 'rss': 0}
@@ -24,8 +25,13 b' def memusage(ui):'
24 key = parts[0][2:-1].lower()
25 key = parts[0][2:-1].lower()
25 if key in result:
26 if key in result:
26 result[key] = int(parts[1])
27 result[key] = int(parts[1])
27 ui.write_err(", ".join(["%s: %.1f MiB" % (k, v / 1024.0)
28 ui.write_err(
28 for k, v in result.iteritems()]) + "\n")
29 ", ".join(
30 ["%s: %.1f MiB" % (k, v / 1024.0) for k, v in result.iteritems()]
31 )
32 + "\n"
33 )
34
29
35
30 def extsetup(ui):
36 def extsetup(ui):
31 ui.atexit(memusage, ui)
37 ui.atexit(memusage, ui)
@@ -98,7 +98,10 b' def secure_download_stream(url, size, sh'
98 length = 0
98 length = 0
99
99
100 with urllib.request.urlopen(url) as fh:
100 with urllib.request.urlopen(url) as fh:
101 if not url.endswith('.gz') and fh.info().get('Content-Encoding') == 'gzip':
101 if (
102 not url.endswith('.gz')
103 and fh.info().get('Content-Encoding') == 'gzip'
104 ):
102 fh = gzip.GzipFile(fileobj=fh)
105 fh = gzip.GzipFile(fileobj=fh)
103
106
104 while True:
107 while True:
@@ -114,12 +117,14 b' def secure_download_stream(url, size, sh'
114 digest = h.hexdigest()
117 digest = h.hexdigest()
115
118
116 if length != size:
119 if length != size:
117 raise IntegrityError('size mismatch on %s: wanted %d; got %d' % (
120 raise IntegrityError(
118 url, size, length))
121 'size mismatch on %s: wanted %d; got %d' % (url, size, length)
122 )
119
123
120 if digest != sha256:
124 if digest != sha256:
121 raise IntegrityError('sha256 mismatch on %s: wanted %s; got %s' % (
125 raise IntegrityError(
122 url, sha256, digest))
126 'sha256 mismatch on %s: wanted %s; got %s' % (url, sha256, digest)
127 )
123
128
124
129
125 def download_to_path(url: str, path: pathlib.Path, size: int, sha256: str):
130 def download_to_path(url: str, path: pathlib.Path, size: int, sha256: str):
@@ -162,12 +167,14 b' def download_to_path(url: str, path: pat'
162 print('successfully downloaded %s' % url)
167 print('successfully downloaded %s' % url)
163
168
164
169
165 def download_entry(name: dict, dest_path: pathlib.Path, local_name=None) -> pathlib.Path:
170 def download_entry(
171 name: dict, dest_path: pathlib.Path, local_name=None
172 ) -> pathlib.Path:
166 entry = DOWNLOADS[name]
173 entry = DOWNLOADS[name]
167
174
168 url = entry['url']
175 url = entry['url']
169
176
170 local_name = local_name or url[url.rindex('/') + 1:]
177 local_name = local_name or url[url.rindex('/') + 1 :]
171
178
172 local_path = dest_path / local_name
179 local_path = dest_path / local_name
173 download_to_path(url, local_path, entry['size'], entry['sha256'])
180 download_to_path(url, local_path, entry['size'], entry['sha256'])
@@ -12,12 +12,8 b' import pathlib'
12 import shutil
12 import shutil
13 import subprocess
13 import subprocess
14
14
15 from .py2exe import (
15 from .py2exe import build_py2exe
16 build_py2exe,
16 from .util import find_vc_runtime_files
17 )
18 from .util import (
19 find_vc_runtime_files,
20 )
21
17
22
18
23 EXTRA_PACKAGES = {
19 EXTRA_PACKAGES = {
@@ -28,9 +24,13 b' EXTRA_PACKAGES = {'
28 }
24 }
29
25
30
26
31 def build(source_dir: pathlib.Path, build_dir: pathlib.Path,
27 def build(
32 python_exe: pathlib.Path, iscc_exe: pathlib.Path,
28 source_dir: pathlib.Path,
33 version=None):
29 build_dir: pathlib.Path,
30 python_exe: pathlib.Path,
31 iscc_exe: pathlib.Path,
32 version=None,
33 ):
34 """Build the Inno installer.
34 """Build the Inno installer.
35
35
36 Build files will be placed in ``build_dir``.
36 Build files will be placed in ``build_dir``.
@@ -44,11 +44,18 b' def build(source_dir: pathlib.Path, buil'
44
44
45 vc_x64 = r'\x64' in os.environ.get('LIB', '')
45 vc_x64 = r'\x64' in os.environ.get('LIB', '')
46
46
47 requirements_txt = (source_dir / 'contrib' / 'packaging' /
47 requirements_txt = (
48 'inno' / 'requirements.txt')
48 source_dir / 'contrib' / 'packaging' / 'inno' / 'requirements.txt'
49 )
49
50
50 build_py2exe(source_dir, build_dir, python_exe, 'inno',
51 build_py2exe(
51 requirements_txt, extra_packages=EXTRA_PACKAGES)
52 source_dir,
53 build_dir,
54 python_exe,
55 'inno',
56 requirements_txt,
57 extra_packages=EXTRA_PACKAGES,
58 )
52
59
53 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
60 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
54 for f in find_vc_runtime_files(vc_x64):
61 for f in find_vc_runtime_files(vc_x64):
@@ -11,9 +11,7 b' import os'
11 import pathlib
11 import pathlib
12 import subprocess
12 import subprocess
13
13
14 from .downloads import (
14 from .downloads import download_entry
15 download_entry,
16 )
17 from .util import (
15 from .util import (
18 extract_tar_to_directory,
16 extract_tar_to_directory,
19 extract_zip_to_directory,
17 extract_zip_to_directory,
@@ -21,12 +19,17 b' from .util import ('
21 )
19 )
22
20
23
21
24 def build_py2exe(source_dir: pathlib.Path, build_dir: pathlib.Path,
22 def build_py2exe(
25 python_exe: pathlib.Path, build_name: str,
23 source_dir: pathlib.Path,
26 venv_requirements_txt: pathlib.Path,
24 build_dir: pathlib.Path,
27 extra_packages=None, extra_excludes=None,
25 python_exe: pathlib.Path,
28 extra_dll_excludes=None,
26 build_name: str,
29 extra_packages_script=None):
27 venv_requirements_txt: pathlib.Path,
28 extra_packages=None,
29 extra_excludes=None,
30 extra_dll_excludes=None,
31 extra_packages_script=None,
32 ):
30 """Build Mercurial with py2exe.
33 """Build Mercurial with py2exe.
31
34
32 Build files will be placed in ``build_dir``.
35 Build files will be placed in ``build_dir``.
@@ -36,9 +39,11 b' def build_py2exe(source_dir: pathlib.Pat'
36 to already be configured with an active toolchain.
39 to already be configured with an active toolchain.
37 """
40 """
38 if 'VCINSTALLDIR' not in os.environ:
41 if 'VCINSTALLDIR' not in os.environ:
39 raise Exception('not running from a Visual C++ build environment; '
42 raise Exception(
40 'execute the "Visual C++ <version> Command Prompt" '
43 'not running from a Visual C++ build environment; '
41 'application shortcut or a vcsvarsall.bat file')
44 'execute the "Visual C++ <version> Command Prompt" '
45 'application shortcut or a vcsvarsall.bat file'
46 )
42
47
43 # Identity x86/x64 and validate the environment matches the Python
48 # Identity x86/x64 and validate the environment matches the Python
44 # architecture.
49 # architecture.
@@ -48,12 +53,16 b' def build_py2exe(source_dir: pathlib.Pat'
48
53
49 if vc_x64:
54 if vc_x64:
50 if py_info['arch'] != '64bit':
55 if py_info['arch'] != '64bit':
51 raise Exception('architecture mismatch: Visual C++ environment '
56 raise Exception(
52 'is configured for 64-bit but Python is 32-bit')
57 'architecture mismatch: Visual C++ environment '
58 'is configured for 64-bit but Python is 32-bit'
59 )
53 else:
60 else:
54 if py_info['arch'] != '32bit':
61 if py_info['arch'] != '32bit':
55 raise Exception('architecture mismatch: Visual C++ environment '
62 raise Exception(
56 'is configured for 32-bit but Python is 64-bit')
63 'architecture mismatch: Visual C++ environment '
64 'is configured for 32-bit but Python is 64-bit'
65 )
57
66
58 if py_info['py3']:
67 if py_info['py3']:
59 raise Exception('Only Python 2 is currently supported')
68 raise Exception('Only Python 2 is currently supported')
@@ -65,11 +74,11 b' def build_py2exe(source_dir: pathlib.Pat'
65 virtualenv_pkg, virtualenv_entry = download_entry('virtualenv', build_dir)
74 virtualenv_pkg, virtualenv_entry = download_entry('virtualenv', build_dir)
66 py2exe_pkg, py2exe_entry = download_entry('py2exe', build_dir)
75 py2exe_pkg, py2exe_entry = download_entry('py2exe', build_dir)
67
76
68 venv_path = build_dir / ('venv-%s-%s' % (build_name,
77 venv_path = build_dir / (
69 'x64' if vc_x64 else 'x86'))
78 'venv-%s-%s' % (build_name, 'x64' if vc_x64 else 'x86')
79 )
70
80
71 gettext_root = build_dir / (
81 gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version'])
72 'gettext-win-%s' % gettext_entry['version'])
73
82
74 if not gettext_root.exists():
83 if not gettext_root.exists():
75 extract_zip_to_directory(gettext_pkg, gettext_root)
84 extract_zip_to_directory(gettext_pkg, gettext_root)
@@ -77,7 +86,8 b' def build_py2exe(source_dir: pathlib.Pat'
77
86
78 # This assumes Python 2. We don't need virtualenv on Python 3.
87 # This assumes Python 2. We don't need virtualenv on Python 3.
79 virtualenv_src_path = build_dir / (
88 virtualenv_src_path = build_dir / (
80 'virtualenv-%s' % virtualenv_entry['version'])
89 'virtualenv-%s' % virtualenv_entry['version']
90 )
81 virtualenv_py = virtualenv_src_path / 'virtualenv.py'
91 virtualenv_py = virtualenv_src_path / 'virtualenv.py'
82
92
83 if not virtualenv_src_path.exists():
93 if not virtualenv_src_path.exists():
@@ -91,14 +101,15 b' def build_py2exe(source_dir: pathlib.Pat'
91 if not venv_path.exists():
101 if not venv_path.exists():
92 print('creating virtualenv with dependencies')
102 print('creating virtualenv with dependencies')
93 subprocess.run(
103 subprocess.run(
94 [str(python_exe), str(virtualenv_py), str(venv_path)],
104 [str(python_exe), str(virtualenv_py), str(venv_path)], check=True
95 check=True)
105 )
96
106
97 venv_python = venv_path / 'Scripts' / 'python.exe'
107 venv_python = venv_path / 'Scripts' / 'python.exe'
98 venv_pip = venv_path / 'Scripts' / 'pip.exe'
108 venv_pip = venv_path / 'Scripts' / 'pip.exe'
99
109
100 subprocess.run([str(venv_pip), 'install', '-r', str(venv_requirements_txt)],
110 subprocess.run(
101 check=True)
111 [str(venv_pip), 'install', '-r', str(venv_requirements_txt)], check=True
112 )
102
113
103 # Force distutils to use VC++ settings from environment, which was
114 # Force distutils to use VC++ settings from environment, which was
104 # validated above.
115 # validated above.
@@ -107,9 +118,13 b' def build_py2exe(source_dir: pathlib.Pat'
107 env['MSSdk'] = '1'
118 env['MSSdk'] = '1'
108
119
109 if extra_packages_script:
120 if extra_packages_script:
110 more_packages = set(subprocess.check_output(
121 more_packages = set(
111 extra_packages_script,
122 subprocess.check_output(extra_packages_script, cwd=build_dir)
112 cwd=build_dir).split(b'\0')[-1].strip().decode('utf-8').splitlines())
123 .split(b'\0')[-1]
124 .strip()
125 .decode('utf-8')
126 .splitlines()
127 )
113 if more_packages:
128 if more_packages:
114 if not extra_packages:
129 if not extra_packages:
115 extra_packages = more_packages
130 extra_packages = more_packages
@@ -119,32 +134,38 b' def build_py2exe(source_dir: pathlib.Pat'
119 if extra_packages:
134 if extra_packages:
120 env['HG_PY2EXE_EXTRA_PACKAGES'] = ' '.join(sorted(extra_packages))
135 env['HG_PY2EXE_EXTRA_PACKAGES'] = ' '.join(sorted(extra_packages))
121 hgext3rd_extras = sorted(
136 hgext3rd_extras = sorted(
122 e for e in extra_packages if e.startswith('hgext3rd.'))
137 e for e in extra_packages if e.startswith('hgext3rd.')
138 )
123 if hgext3rd_extras:
139 if hgext3rd_extras:
124 env['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'] = ' '.join(hgext3rd_extras)
140 env['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'] = ' '.join(hgext3rd_extras)
125 if extra_excludes:
141 if extra_excludes:
126 env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes))
142 env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes))
127 if extra_dll_excludes:
143 if extra_dll_excludes:
128 env['HG_PY2EXE_EXTRA_DLL_EXCLUDES'] = ' '.join(
144 env['HG_PY2EXE_EXTRA_DLL_EXCLUDES'] = ' '.join(
129 sorted(extra_dll_excludes))
145 sorted(extra_dll_excludes)
146 )
130
147
131 py2exe_py_path = venv_path / 'Lib' / 'site-packages' / 'py2exe'
148 py2exe_py_path = venv_path / 'Lib' / 'site-packages' / 'py2exe'
132 if not py2exe_py_path.exists():
149 if not py2exe_py_path.exists():
133 print('building py2exe')
150 print('building py2exe')
134 subprocess.run([str(venv_python), 'setup.py', 'install'],
151 subprocess.run(
135 cwd=py2exe_source_path,
152 [str(venv_python), 'setup.py', 'install'],
136 env=env,
153 cwd=py2exe_source_path,
137 check=True)
154 env=env,
155 check=True,
156 )
138
157
139 # Register location of msgfmt and other binaries.
158 # Register location of msgfmt and other binaries.
140 env['PATH'] = '%s%s%s' % (
159 env['PATH'] = '%s%s%s' % (
141 env['PATH'], os.pathsep, str(gettext_root / 'bin'))
160 env['PATH'],
161 os.pathsep,
162 str(gettext_root / 'bin'),
163 )
142
164
143 print('building Mercurial')
165 print('building Mercurial')
144 subprocess.run(
166 subprocess.run(
145 [str(venv_python), 'setup.py',
167 [str(venv_python), 'setup.py', 'py2exe', 'build_doc', '--html'],
146 'py2exe',
147 'build_doc', '--html'],
148 cwd=str(source_dir),
168 cwd=str(source_dir),
149 env=env,
169 env=env,
150 check=True)
170 check=True,
171 )
@@ -32,8 +32,11 b' def find_vc_runtime_files(x64=False):'
32
32
33 prefix = 'amd64' if x64 else 'x86'
33 prefix = 'amd64' if x64 else 'x86'
34
34
35 candidates = sorted(p for p in os.listdir(winsxs)
35 candidates = sorted(
36 if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix))
36 p
37 for p in os.listdir(winsxs)
38 if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)
39 )
37
40
38 for p in candidates:
41 for p in candidates:
39 print('found candidate VC runtime: %s' % p)
42 print('found candidate VC runtime: %s' % p)
@@ -72,7 +75,7 b' def windows_10_sdk_info():'
72 'version': version,
75 'version': version,
73 'bin_root': bin_version,
76 'bin_root': bin_version,
74 'bin_x86': bin_version / 'x86',
77 'bin_x86': bin_version / 'x86',
75 'bin_x64': bin_version / 'x64'
78 'bin_x64': bin_version / 'x64',
76 }
79 }
77
80
78
81
@@ -89,9 +92,14 b' def find_signtool():'
89 raise Exception('could not find signtool.exe in Windows 10 SDK')
92 raise Exception('could not find signtool.exe in Windows 10 SDK')
90
93
91
94
92 def sign_with_signtool(file_path, description, subject_name=None,
95 def sign_with_signtool(
93 cert_path=None, cert_password=None,
96 file_path,
94 timestamp_url=None):
97 description,
98 subject_name=None,
99 cert_path=None,
100 cert_password=None,
101 timestamp_url=None,
102 ):
95 """Digitally sign a file with signtool.exe.
103 """Digitally sign a file with signtool.exe.
96
104
97 ``file_path`` is file to sign.
105 ``file_path`` is file to sign.
@@ -114,10 +122,13 b' def sign_with_signtool(file_path, descri'
114 cert_password = getpass.getpass('password for %s: ' % cert_path)
122 cert_password = getpass.getpass('password for %s: ' % cert_path)
115
123
116 args = [
124 args = [
117 str(find_signtool()), 'sign',
125 str(find_signtool()),
126 'sign',
118 '/v',
127 '/v',
119 '/fd', 'sha256',
128 '/fd',
120 '/d', description,
129 'sha256',
130 '/d',
131 description,
121 ]
132 ]
122
133
123 if cert_path:
134 if cert_path:
@@ -15,12 +15,8 b' import tempfile'
15 import typing
15 import typing
16 import xml.dom.minidom
16 import xml.dom.minidom
17
17
18 from .downloads import (
18 from .downloads import download_entry
19 download_entry,
19 from .py2exe import build_py2exe
20 )
21 from .py2exe import (
22 build_py2exe,
23 )
24 from .util import (
20 from .util import (
25 extract_zip_to_directory,
21 extract_zip_to_directory,
26 sign_with_signtool,
22 sign_with_signtool,
@@ -84,17 +80,29 b' def normalize_version(version):'
84
80
85 def ensure_vc90_merge_modules(build_dir):
81 def ensure_vc90_merge_modules(build_dir):
86 x86 = (
82 x86 = (
87 download_entry('vc9-crt-x86-msm', build_dir,
83 download_entry(
88 local_name='microsoft.vcxx.crt.x86_msm.msm')[0],
84 'vc9-crt-x86-msm',
89 download_entry('vc9-crt-x86-msm-policy', build_dir,
85 build_dir,
90 local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm')[0]
86 local_name='microsoft.vcxx.crt.x86_msm.msm',
87 )[0],
88 download_entry(
89 'vc9-crt-x86-msm-policy',
90 build_dir,
91 local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm',
92 )[0],
91 )
93 )
92
94
93 x64 = (
95 x64 = (
94 download_entry('vc9-crt-x64-msm', build_dir,
96 download_entry(
95 local_name='microsoft.vcxx.crt.x64_msm.msm')[0],
97 'vc9-crt-x64-msm',
96 download_entry('vc9-crt-x64-msm-policy', build_dir,
98 build_dir,
97 local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm')[0]
99 local_name='microsoft.vcxx.crt.x64_msm.msm',
100 )[0],
101 download_entry(
102 'vc9-crt-x64-msm-policy',
103 build_dir,
104 local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm',
105 )[0],
98 )
106 )
99 return {
107 return {
100 'x86': x86,
108 'x86': x86,
@@ -116,17 +124,26 b' def run_candle(wix, cwd, wxs, source_dir'
116 subprocess.run(args, cwd=str(cwd), check=True)
124 subprocess.run(args, cwd=str(cwd), check=True)
117
125
118
126
119 def make_post_build_signing_fn(name, subject_name=None, cert_path=None,
127 def make_post_build_signing_fn(
120 cert_password=None, timestamp_url=None):
128 name,
129 subject_name=None,
130 cert_path=None,
131 cert_password=None,
132 timestamp_url=None,
133 ):
121 """Create a callable that will use signtool to sign hg.exe."""
134 """Create a callable that will use signtool to sign hg.exe."""
122
135
123 def post_build_sign(source_dir, build_dir, dist_dir, version):
136 def post_build_sign(source_dir, build_dir, dist_dir, version):
124 description = '%s %s' % (name, version)
137 description = '%s %s' % (name, version)
125
138
126 sign_with_signtool(dist_dir / 'hg.exe', description,
139 sign_with_signtool(
127 subject_name=subject_name, cert_path=cert_path,
140 dist_dir / 'hg.exe',
128 cert_password=cert_password,
141 description,
129 timestamp_url=timestamp_url)
142 subject_name=subject_name,
143 cert_path=cert_path,
144 cert_password=cert_password,
145 timestamp_url=timestamp_url,
146 )
130
147
131 return post_build_sign
148 return post_build_sign
132
149
@@ -155,7 +172,8 b' def make_libraries_xml(wix_dir: pathlib.'
155 # We can't use ElementTree because it doesn't handle the
172 # We can't use ElementTree because it doesn't handle the
156 # <?include ?> directives.
173 # <?include ?> directives.
157 doc = xml.dom.minidom.parseString(
174 doc = xml.dom.minidom.parseString(
158 LIBRARIES_XML.format(wix_dir=str(wix_dir)))
175 LIBRARIES_XML.format(wix_dir=str(wix_dir))
176 )
159
177
160 component = doc.getElementsByTagName('Component')[0]
178 component = doc.getElementsByTagName('Component')[0]
161
179
@@ -177,11 +195,16 b' def make_libraries_xml(wix_dir: pathlib.'
177 return doc.toprettyxml()
195 return doc.toprettyxml()
178
196
179
197
180 def build_installer(source_dir: pathlib.Path, python_exe: pathlib.Path,
198 def build_installer(
181 msi_name='mercurial', version=None, post_build_fn=None,
199 source_dir: pathlib.Path,
182 extra_packages_script=None,
200 python_exe: pathlib.Path,
183 extra_wxs:typing.Optional[typing.Dict[str,str]]=None,
201 msi_name='mercurial',
184 extra_features:typing.Optional[typing.List[str]]=None):
202 version=None,
203 post_build_fn=None,
204 extra_packages_script=None,
205 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
206 extra_features: typing.Optional[typing.List[str]] = None,
207 ):
185 """Build a WiX MSI installer.
208 """Build a WiX MSI installer.
186
209
187 ``source_dir`` is the path to the Mercurial source tree to use.
210 ``source_dir`` is the path to the Mercurial source tree to use.
@@ -209,10 +232,15 b' def build_installer(source_dir: pathlib.'
209
232
210 requirements_txt = wix_dir / 'requirements.txt'
233 requirements_txt = wix_dir / 'requirements.txt'
211
234
212 build_py2exe(source_dir, hg_build_dir,
235 build_py2exe(
213 python_exe, 'wix', requirements_txt,
236 source_dir,
214 extra_packages=EXTRA_PACKAGES,
237 hg_build_dir,
215 extra_packages_script=extra_packages_script)
238 python_exe,
239 'wix',
240 requirements_txt,
241 extra_packages=EXTRA_PACKAGES,
242 extra_packages_script=extra_packages_script,
243 )
216
244
217 version = version or normalize_version(find_version(source_dir))
245 version = version or normalize_version(find_version(source_dir))
218 print('using version string: %s' % version)
246 print('using version string: %s' % version)
@@ -265,16 +293,19 b' def build_installer(source_dir: pathlib.'
265
293
266 run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
294 run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
267
295
268 msi_path = source_dir / 'dist' / (
296 msi_path = (
269 '%s-%s-%s.msi' % (msi_name, version, arch))
297 source_dir / 'dist' / ('%s-%s-%s.msi' % (msi_name, version, arch))
298 )
270
299
271 args = [
300 args = [
272 str(wix_path / 'light.exe'),
301 str(wix_path / 'light.exe'),
273 '-nologo',
302 '-nologo',
274 '-ext', 'WixUIExtension',
303 '-ext',
304 'WixUIExtension',
275 '-sw1076',
305 '-sw1076',
276 '-spdb',
306 '-spdb',
277 '-o', str(msi_path),
307 '-o',
308 str(msi_path),
278 ]
309 ]
279
310
280 for source, rel_path in SUPPORT_WXS:
311 for source, rel_path in SUPPORT_WXS:
@@ -286,10 +317,12 b' def build_installer(source_dir: pathlib.'
286 source = os.path.basename(source)
317 source = os.path.basename(source)
287 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
318 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
288
319
289 args.extend([
320 args.extend(
290 str(build_dir / 'library.wixobj'),
321 [
291 str(build_dir / 'mercurial.wixobj'),
322 str(build_dir / 'library.wixobj'),
292 ])
323 str(build_dir / 'mercurial.wixobj'),
324 ]
325 )
293
326
294 subprocess.run(args, cwd=str(source_dir), check=True)
327 subprocess.run(args, cwd=str(source_dir), check=True)
295
328
@@ -300,11 +333,19 b' def build_installer(source_dir: pathlib.'
300 }
333 }
301
334
302
335
303 def build_signed_installer(source_dir: pathlib.Path, python_exe: pathlib.Path,
336 def build_signed_installer(
304 name: str, version=None, subject_name=None,
337 source_dir: pathlib.Path,
305 cert_path=None, cert_password=None,
338 python_exe: pathlib.Path,
306 timestamp_url=None, extra_packages_script=None,
339 name: str,
307 extra_wxs=None, extra_features=None):
340 version=None,
341 subject_name=None,
342 cert_path=None,
343 cert_password=None,
344 timestamp_url=None,
345 extra_packages_script=None,
346 extra_wxs=None,
347 extra_features=None,
348 ):
308 """Build an installer with signed executables."""
349 """Build an installer with signed executables."""
309
350
310 post_build_fn = make_post_build_signing_fn(
351 post_build_fn = make_post_build_signing_fn(
@@ -312,16 +353,27 b' def build_signed_installer(source_dir: p'
312 subject_name=subject_name,
353 subject_name=subject_name,
313 cert_path=cert_path,
354 cert_path=cert_path,
314 cert_password=cert_password,
355 cert_password=cert_password,
315 timestamp_url=timestamp_url)
356 timestamp_url=timestamp_url,
357 )
316
358
317 info = build_installer(source_dir, python_exe=python_exe,
359 info = build_installer(
318 msi_name=name.lower(), version=version,
360 source_dir,
319 post_build_fn=post_build_fn,
361 python_exe=python_exe,
320 extra_packages_script=extra_packages_script,
362 msi_name=name.lower(),
321 extra_wxs=extra_wxs, extra_features=extra_features)
363 version=version,
364 post_build_fn=post_build_fn,
365 extra_packages_script=extra_packages_script,
366 extra_wxs=extra_wxs,
367 extra_features=extra_features,
368 )
322
369
323 description = '%s %s' % (name, version)
370 description = '%s %s' % (name, version)
324
371
325 sign_with_signtool(info['msi_path'], description,
372 sign_with_signtool(
326 subject_name=subject_name, cert_path=cert_path,
373 info['msi_path'],
327 cert_password=cert_password, timestamp_url=timestamp_url)
374 description,
375 subject_name=subject_name,
376 cert_path=cert_path,
377 cert_password=cert_password,
378 timestamp_url=timestamp_url,
379 )
@@ -19,14 +19,15 b' import sys'
19 if __name__ == '__main__':
19 if __name__ == '__main__':
20 parser = argparse.ArgumentParser()
20 parser = argparse.ArgumentParser()
21
21
22 parser.add_argument('--python',
22 parser.add_argument(
23 required=True,
23 '--python', required=True, help='path to python.exe to use'
24 help='path to python.exe to use')
24 )
25 parser.add_argument('--iscc',
25 parser.add_argument('--iscc', help='path to iscc.exe to use')
26 help='path to iscc.exe to use')
26 parser.add_argument(
27 parser.add_argument('--version',
27 '--version',
28 help='Mercurial version string to use '
28 help='Mercurial version string to use '
29 '(detected from __version__.py if not defined')
29 '(detected from __version__.py if not defined',
30 )
30
31
31 args = parser.parse_args()
32 args = parser.parse_args()
32
33
@@ -36,8 +37,11 b" if __name__ == '__main__':"
36 if args.iscc:
37 if args.iscc:
37 iscc = pathlib.Path(args.iscc)
38 iscc = pathlib.Path(args.iscc)
38 else:
39 else:
39 iscc = (pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Inno Setup 5' /
40 iscc = (
40 'ISCC.exe')
41 pathlib.Path(os.environ['ProgramFiles(x86)'])
42 / 'Inno Setup 5'
43 / 'ISCC.exe'
44 )
41
45
42 here = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
46 here = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
43 source_dir = here.parent.parent.parent
47 source_dir = here.parent.parent.parent
@@ -47,5 +51,10 b" if __name__ == '__main__':"
47
51
48 from hgpackaging.inno import build
52 from hgpackaging.inno import build
49
53
50 build(source_dir, build_dir, pathlib.Path(args.python), iscc,
54 build(
51 version=args.version)
55 source_dir,
56 build_dir,
57 pathlib.Path(args.python),
58 iscc,
59 version=args.version,
60 )
@@ -17,31 +17,42 b' import sys'
17 if __name__ == '__main__':
17 if __name__ == '__main__':
18 parser = argparse.ArgumentParser()
18 parser = argparse.ArgumentParser()
19
19
20 parser.add_argument('--name',
20 parser.add_argument('--name', help='Application name', default='Mercurial')
21 help='Application name',
21 parser.add_argument(
22 default='Mercurial')
22 '--python', help='Path to Python executable to use', required=True
23 parser.add_argument('--python',
23 )
24 help='Path to Python executable to use',
24 parser.add_argument(
25 required=True)
25 '--sign-sn',
26 parser.add_argument('--sign-sn',
26 help='Subject name (or fragment thereof) of certificate '
27 help='Subject name (or fragment thereof) of certificate '
27 'to use for signing',
28 'to use for signing')
28 )
29 parser.add_argument('--sign-cert',
29 parser.add_argument(
30 help='Path to certificate to use for signing')
30 '--sign-cert', help='Path to certificate to use for signing'
31 parser.add_argument('--sign-password',
31 )
32 help='Password for signing certificate')
32 parser.add_argument(
33 parser.add_argument('--sign-timestamp-url',
33 '--sign-password', help='Password for signing certificate'
34 help='URL of timestamp server to use for signing')
34 )
35 parser.add_argument('--version',
35 parser.add_argument(
36 help='Version string to use')
36 '--sign-timestamp-url',
37 parser.add_argument('--extra-packages-script',
37 help='URL of timestamp server to use for signing',
38 help=('Script to execute to include extra packages in '
38 )
39 'py2exe binary.'))
39 parser.add_argument('--version', help='Version string to use')
40 parser.add_argument('--extra-wxs',
40 parser.add_argument(
41 help='CSV of path_to_wxs_file=working_dir_for_wxs_file')
41 '--extra-packages-script',
42 parser.add_argument('--extra-features',
42 help=(
43 help=('CSV of extra feature names to include '
43 'Script to execute to include extra packages in ' 'py2exe binary.'
44 'in the installer from the extra wxs files'))
44 ),
45 )
46 parser.add_argument(
47 '--extra-wxs', help='CSV of path_to_wxs_file=working_dir_for_wxs_file'
48 )
49 parser.add_argument(
50 '--extra-features',
51 help=(
52 'CSV of extra feature names to include '
53 'in the installer from the extra wxs files'
54 ),
55 )
45
56
46 args = parser.parse_args()
57 args = parser.parse_args()
47
58
@@ -69,7 +80,8 b" if __name__ == '__main__':"
69 kwargs['extra_packages_script'] = args.extra_packages_script
80 kwargs['extra_packages_script'] = args.extra_packages_script
70 if args.extra_wxs:
81 if args.extra_wxs:
71 kwargs['extra_wxs'] = dict(
82 kwargs['extra_wxs'] = dict(
72 thing.split("=") for thing in args.extra_wxs.split(','))
83 thing.split("=") for thing in args.extra_wxs.split(',')
84 )
73 if args.extra_features:
85 if args.extra_features:
74 kwargs['extra_features'] = args.extra_features.split(',')
86 kwargs['extra_features'] = args.extra_features.split(',')
75
87
@@ -44,18 +44,12 b' def plot(data, title=None):'
44 comb_plt = fig.add_subplot(211)
44 comb_plt = fig.add_subplot(211)
45 other_plt = fig.add_subplot(212)
45 other_plt = fig.add_subplot(212)
46
46
47 comb_plt.plot(ary[0],
47 comb_plt.plot(
48 np.cumsum(ary[1]),
48 ary[0], np.cumsum(ary[1]), color='red', linewidth=1, label='comb'
49 color='red',
49 )
50 linewidth=1,
51 label='comb')
52
50
53 plots = []
51 plots = []
54 p = other_plt.plot(ary[0],
52 p = other_plt.plot(ary[0], ary[1], color='red', linewidth=1, label='wall')
55 ary[1],
56 color='red',
57 linewidth=1,
58 label='wall')
59 plots.append(p)
53 plots.append(p)
60
54
61 colors = {
55 colors = {
@@ -64,20 +58,24 b' def plot(data, title=None):'
64 1000: ('purple', 'xkcd:dark pink'),
58 1000: ('purple', 'xkcd:dark pink'),
65 }
59 }
66 for n, color in colors.items():
60 for n, color in colors.items():
67 avg_n = np.convolve(ary[1], np.full(n, 1. / n), 'valid')
61 avg_n = np.convolve(ary[1], np.full(n, 1.0 / n), 'valid')
68 p = other_plt.plot(ary[0][n - 1:],
62 p = other_plt.plot(
69 avg_n,
63 ary[0][n - 1 :],
70 color=color[0],
64 avg_n,
71 linewidth=1,
65 color=color[0],
72 label='avg time last %d' % n)
66 linewidth=1,
67 label='avg time last %d' % n,
68 )
73 plots.append(p)
69 plots.append(p)
74
70
75 med_n = scipy.signal.medfilt(ary[1], n + 1)
71 med_n = scipy.signal.medfilt(ary[1], n + 1)
76 p = other_plt.plot(ary[0],
72 p = other_plt.plot(
77 med_n,
73 ary[0],
78 color=color[1],
74 med_n,
79 linewidth=1,
75 color=color[1],
80 label='median time last %d' % n)
76 linewidth=1,
77 label='median time last %d' % n,
78 )
81 plots.append(p)
79 plots.append(p)
82
80
83 formatter = mticker.ScalarFormatter()
81 formatter = mticker.ScalarFormatter()
@@ -108,6 +106,7 b' def plot(data, title=None):'
108 else:
106 else:
109 legline.set_alpha(0.2)
107 legline.set_alpha(0.2)
110 fig.canvas.draw()
108 fig.canvas.draw()
109
111 if title is not None:
110 if title is not None:
112 fig.canvas.set_window_title(title)
111 fig.canvas.set_window_title(title)
113 fig.canvas.mpl_connect('pick_event', onpick)
112 fig.canvas.mpl_connect('pick_event', onpick)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file renamed from mercurial/repository.py to mercurial/interfaces/repository.py
NO CONTENT: file renamed from mercurial/repository.py to mercurial/interfaces/repository.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file renamed from mercurial/utils/interfaceutil.py to mercurial/interfaces/util.py
NO CONTENT: file renamed from mercurial/utils/interfaceutil.py to mercurial/interfaces/util.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file chmod 100755 => 100644
NO CONTENT: modified file chmod 100755 => 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/dirstate/dirs_multiset.rs
NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/dirstate/dirs_multiset.rs
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/parsers.rs
NO CONTENT: file copied from rust/hg-cpython/src/dirstate.rs to rust/hg-cpython/src/parsers.rs
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now