@@ -10,7 +10,7 @@ import sys
 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
 # to work when run from a virtualenv. The modules were chosen empirically
 # so that the return value matches the return value without virtualenv.
 if True: # disable lexical sorting checks
     try:
         import BaseHTTPServer as basehttpserver
     except ImportError:
@@ -47,9 +47,7 @@ allowsymbolimports = (
 )

 # Whitelist of symbols that can be directly imported.
-directsymbols = (
-    'demandimport',
-)
+directsymbols = ('demandimport',)

 # Modules that must be aliased because they are commonly confused with
 # common variables and can create aliasing and readability issues.
@@ -57,6 +55,7 @@ requirealias = {
     'ui': 'uimod',
 }

+
 def usingabsolute(root):
     """Whether absolute imports are being used."""
     if sys.version_info[0] >= 3:
@@ -71,6 +70,7 @@ def usingabsolute(root):

     return False

+
 def walklocal(root):
     """Recursively yield all descendant nodes but not in a different scope"""
     todo = collections.deque(ast.iter_child_nodes(root))
@@ -82,6 +82,7 @@ def walklocal(root):
         todo.extend(ast.iter_child_nodes(node))
         yield node, newscope

+
 def dotted_name_of_path(path):
     """Given a relative path to a source file, return its dotted module name.

@@ -91,11 +92,12 @@ def dotted_name_of_path(path):
     'zlib'
     """
     parts = path.replace(os.sep, '/').split('/')
     parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
     if parts[-1].endswith('module'):
         parts[-1] = parts[-1][:-6]
     return '.'.join(parts)

+
 def fromlocalfunc(modulename, localmods):
     """Get a function to examine which locally defined module the
     target source imports via a specified name.
@@ -164,6 +166,7 @@ def fromlocalfunc(modulename, localmods)
     prefix = '.'.join(modulename.split('.')[:-1])
     if prefix:
         prefix += '.'
+
     def fromlocal(name, level=0):
         # name is false value when relative imports are used.
         if not name:
@@ -175,8 +178,9 @@ def fromlocalfunc(modulename, localmods)
             # Check relative name first.
             candidates = [prefix + name, name]
         else:
-            candidates = ['.'.join(modulename.split('.')[:-level]) +
-                          '.' + name]
+            candidates = [
+                '.'.join(modulename.split('.')[:-level]) + '.' + name
+            ]

         for n in candidates:
             if n in localmods:
@@ -185,18 +189,21 @@ def fromlocalfunc(modulename, localmods)
             if dottedpath in localmods:
                 return (n, dottedpath, True)
         return False
+
     return fromlocal

+
 def populateextmods(localmods):
     """Populate C extension modules based on pure modules"""
     newlocalmods = set(localmods)
     for n in localmods:
         if n.startswith('mercurial.pure.'):
-            m = n[len('mercurial.pure.'):]
+            m = n[len('mercurial.pure.') :]
             newlocalmods.add('mercurial.cext.' + m)
             newlocalmods.add('mercurial.cffi._' + m)
     return newlocalmods

+
 def list_stdlib_modules():
     """List the modules present in the stdlib.

@@ -232,13 +239,13 @@ def list_stdlib_modules():
     for m in ['msvcrt', '_winreg']:
         yield m
     yield '__builtin__'
     yield 'builtins' # python3 only
     yield 'importlib.abc' # python3 only
     yield 'importlib.machinery' # python3 only
     yield 'importlib.util' # python3 only
     for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
         yield m
     for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
         yield m
     for m in ['cffi']:
         yield m
@@ -264,14 +271,17 @@ def list_stdlib_modules():
     for libpath in sys.path:
         # We want to walk everything in sys.path that starts with something in
         # stdlib_prefixes, but not directories from the hg sources.
-        if (os.path.abspath(libpath).startswith(sourceroot)
-            or not any(libpath.startswith(p) for p in stdlib_prefixes)):
+        if os.path.abspath(libpath).startswith(sourceroot) or not any(
+            libpath.startswith(p) for p in stdlib_prefixes
+        ):
             continue
         for top, dirs, files in os.walk(libpath):
             for i, d in reversed(list(enumerate(dirs))):
-                if (not os.path.exists(os.path.join(top, d, '__init__.py'))
-                    or top == libpath and d in ('hgdemandimport', 'hgext',
-                                                'mercurial')):
+                if (
+                    not os.path.exists(os.path.join(top, d, '__init__.py'))
+                    or top == libpath
+                    and d in ('hgdemandimport', 'hgext', 'mercurial')
+                ):
                     del dirs[i]
             for name in files:
                 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
@@ -280,12 +290,14 @@ def list_stdlib_modules():
                     full_path = top
                 else:
                     full_path = os.path.join(top, name)
-                rel_path = full_path[len(libpath) + 1:]
+                rel_path = full_path[len(libpath) + 1 :]
                 mod = dotted_name_of_path(rel_path)
                 yield mod

+
 stdlib_modules = set(list_stdlib_modules())

+
 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
     """Given the source of a file as a string, yield the names
     imported by that file.
@@ -383,6 +395,7 @@ def imported_modules(source, modulename,
                 # lookup
                 yield dottedpath

+
 def verify_import_convention(module, source, localmods):
     """Verify imports match our established coding convention.

@@ -400,6 +413,7 @@ def verify_import_convention(module, sou
     else:
         return verify_stdlib_on_own_line(root)

+
 def verify_modern_convention(module, root, localmods, root_col_offset=0):
     """Verify a file conforms to the modern import convention rules.

@@ -443,19 +457,24 @@ def verify_modern_convention(module, roo
     seenlevels = set()

     for node, newscope in walklocal(root):
+
         def msg(fmt, *args):
             return (fmt % args, node.lineno)
+
         if newscope:
             # Check for local imports in function
-            for r in verify_modern_convention(module, node, localmods,
-                                              node.col_offset + 4):
+            for r in verify_modern_convention(
+                module, node, localmods, node.col_offset + 4
+            ):
                 yield r
         elif isinstance(node, ast.Import):
             # Disallow "import foo, bar" and require separate imports
             # for each module.
             if len(node.names) > 1:
-                yield msg('multiple imported names: %s',
-                          ', '.join(n.name for n in node.names))
+                yield msg(
+                    'multiple imported names: %s',
+                    ', '.join(n.name for n in node.names),
+                )

             name = node.names[0].name
             asname = node.names[0].asname
@@ -465,16 +484,20 @@ def verify_modern_convention(module, roo
             # Ignore sorting rules on imports inside blocks.
             if node.col_offset == root_col_offset:
                 if lastname and name < lastname and laststdlib == stdlib:
-                    yield msg('imports not lexically sorted: %s < %s',
-                              name, lastname)
+                    yield msg(
+                        'imports not lexically sorted: %s < %s', name, lastname
+                    )

                 lastname = name
                 laststdlib = stdlib

             # stdlib imports should be before local imports.
             if stdlib and seenlocal and node.col_offset == root_col_offset:
-                yield msg('stdlib import "%s" follows local import: %s',
-                          name, seenlocal)
+                yield msg(
+                    'stdlib import "%s" follows local import: %s',
+                    name,
+                    seenlocal,
+                )

             if not stdlib:
                 seenlocal = name
@@ -485,13 +508,16 @@ def verify_modern_convention(module, roo
                 yield msg('import should be relative: %s', name)

             if name in requirealias and asname != requirealias[name]:
-                yield msg('%s module must be "as" aliased to %s',
-                          name, requirealias[name])
+                yield msg(
+                    '%s module must be "as" aliased to %s',
+                    name,
+                    requirealias[name],
+                )

         elif isinstance(node, ast.ImportFrom):
             # Resolve the full imported module name.
             if node.level > 0:
-                fullname = '.'.join(module.split('.')[:-node.level])
+                fullname = '.'.join(module.split('.')[: -node.level])
                 if node.module:
                     fullname += '.%s' % node.module
             else:
@@ -508,7 +534,8 @@ def verify_modern_convention(module, roo
                 if not fullname or (
                     fullname in stdlib_modules
                     and fullname not in localmods
-                    and fullname + '.__init__' not in localmods):
+                    and fullname + '.__init__' not in localmods
+                ):
                     yield msg('relative import of stdlib module')
                 else:
                     seenlocal = fullname
@@ -518,19 +545,24 @@ def verify_modern_convention(module, roo
             found = fromlocal(node.module, node.level)
             if found and found[2]: # node.module is a package
                 prefix = found[0] + '.'
-                symbols = (n.name for n in node.names
-                           if not fromlocal(prefix + n.name))
+                symbols = (
+                    n.name for n in node.names if not fromlocal(prefix + n.name)
+                )
             else:
                 symbols = (n.name for n in node.names)
             symbols = [sym for sym in symbols if sym not in directsymbols]
             if node.module and node.col_offset == root_col_offset:
                 if symbols and fullname not in allowsymbolimports:
-                    yield msg('direct symbol import %s from %s',
-                              ', '.join(symbols), fullname)
+                    yield msg(
+                        'direct symbol import %s from %s',
+                        ', '.join(symbols),
+                        fullname,
+                    )

                 if symbols and seennonsymbollocal:
-                    yield msg('symbol import follows non-symbol import: %s',
-                              fullname)
+                    yield msg(
+                        'symbol import follows non-symbol import: %s', fullname
+                    )
             if not symbols and fullname not in stdlib_modules:
                 seennonsymbollocal = True

@@ -538,15 +570,19 @@ def verify_modern_convention(module, roo
                 assert node.level

                 # Only allow 1 group per level.
-                if (node.level in seenlevels
-                    and node.col_offset == root_col_offset):
-                    yield msg('multiple "from %s import" statements',
-                              '.' * node.level)
+                if (
+                    node.level in seenlevels
+                    and node.col_offset == root_col_offset
+                ):
+                    yield msg(
+                        'multiple "from %s import" statements', '.' * node.level
+                    )

                 # Higher-level groups come before lower-level groups.
                 if any(node.level > l for l in seenlevels):
-                    yield msg('higher-level import should come first: %s',
-                              fullname)
+                    yield msg(
+                        'higher-level import should come first: %s', fullname
+                    )

                 seenlevels.add(node.level)

@@ -556,14 +592,23 @@ def verify_modern_convention(module, roo

             for n in node.names:
                 if lastentryname and n.name < lastentryname:
-                    yield msg('imports from %s not lexically sorted: %s < %s',
-                              fullname, n.name, lastentryname)
+                    yield msg(
+                        'imports from %s not lexically sorted: %s < %s',
+                        fullname,
+                        n.name,
+                        lastentryname,
+                    )

                 lastentryname = n.name

                 if n.name in requirealias and n.asname != requirealias[n.name]:
-                    yield msg('%s from %s must be "as" aliased to %s',
-                              n.name, fullname, requirealias[n.name])
+                    yield msg(
+                        '%s from %s must be "as" aliased to %s',
+                        n.name,
+                        fullname,
+                        requirealias[n.name],
+                    )
+

 def verify_stdlib_on_own_line(root):
     """Given some python source, verify that stdlib imports are done
@@ -582,13 +627,20 @@ def verify_stdlib_on_own_line(root):
             for n in node.names:
                 from_stdlib[n.name in stdlib_modules].append(n.name)
             if from_stdlib[True] and from_stdlib[False]:
-                yield ('mixed imports\n stdlib: %s\n relative: %s' %
-                       (', '.join(sorted(from_stdlib[True])),
-                        ', '.join(sorted(from_stdlib[False]))), node.lineno)
+                yield (
+                    'mixed imports\n stdlib: %s\n relative: %s'
+                    % (
+                        ', '.join(sorted(from_stdlib[True])),
+                        ', '.join(sorted(from_stdlib[False])),
+                    ),
+                    node.lineno,
+                )
+

 class CircularImport(Exception):
     pass

+
 def checkmod(mod, imports):
     shortest = {}
     visit = [[mod]]
@@ -603,6 +655,7 @@ def checkmod(mod, imports):
                     continue
                 visit.append(path + [i])

+
 def rotatecycle(cycle):
     """arrange a cycle so that the lexicographically first module listed first

@@ -613,6 +666,7 @@ def rotatecycle(cycle):
     idx = cycle.index(lowest)
     return cycle[idx:] + cycle[:idx] + [lowest]

+
 def find_cycles(imports):
     """Find cycles in an already-loaded import graph.

@@ -636,9 +690,11 @@ def find_cycles(imports):
             cycles.add(" -> ".join(rotatecycle(cycle)))
     return cycles

+
 def _cycle_sortkey(c):
     return len(c), c

+
 def embedded(f, modname, src):
     """Extract embedded python code

@@ -680,6 +736,7 @@ def embedded(f, modname, src):
         modname = modname.decode('utf8')
         yield code, "%s[%d]" % (modname, starts), name, starts - 1

+
 def sources(f, modname):
     """Yields possibly multiple sources from a filepath

@@ -700,6 +757,7 @@ def sources(f, modname):
         for script, modname, t, line in embedded(f, modname, src):
             yield script, modname.encode('utf8'), t, line

+
 def main(argv):
     if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
         print('Usage: %s {-|file [file] [file] ...}')
@@ -721,15 +779,19 @@ def main(argv):
         for src, modname, name, line in sources(source_path, localmodname):
             try:
                 used_imports[modname] = sorted(
-                    imported_modules(src, modname, name, localmods,
-                                     ignore_nested=True))
-                for error, lineno in verify_import_convention(modname, src,
-                                                              localmods):
+                    imported_modules(
+                        src, modname, name, localmods, ignore_nested=True
+                    )
+                )
+                for error, lineno in verify_import_convention(
+                    modname, src, localmods
+                ):
                     any_errors = True
                     print('%s:%d: %s' % (source_path, lineno + line, error))
             except SyntaxError as e:
-                print('%s:%d: SyntaxError: %s' %
-                      (source_path, e.lineno + line, e))
+                print(
+                    '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e)
+                )
     cycles = find_cycles(used_imports)
     if cycles:
         firstmods = set()
@@ -745,5 +807,6 @@ def main(argv):
         any_errors = True
     return any_errors != 0

+
 if __name__ == '__main__':
     sys.exit(int(main(sys.argv)))
@@ -8,6 +8,7 @@ from __future__ import absolute_import

 from . import repoview

+
 def cachetocopy(srcrepo):
     """return the list of cache file valuable to copy during a clone"""
     # In local clones we're copying all nodes, not just served
@@ -14,6 +14,7 @@ from . import (
     pycompat,
 )

+
 def addlines(fp, hunk, lena, lenb, a, b):
     """Read lines from fp into the hunk

@@ -47,6 +48,7 @@ def addlines(fp, hunk, lena, lenb, a, b)
                 b.append(s[1:])
                 a.append(s)

+
 def fixnewline(hunk, a, b):
     """Fix up the last lines of a and b when the patch has no newline at EOF"""
     l = hunk[-1]
@@ -62,6 +64,7 @@ def fixnewline(hunk, a, b):
         a[-1] = hline
     hunk[-1] = hline

+
 def testhunk(a, b, bstart):
     """Compare the lines in a with the lines in b

@@ -15,6 +15,7 @@ from . import (
     util,
 )

+
 class dirstateguard(util.transactional):
     '''Restore dirstate at unexpected failure.

@@ -34,14 +35,16 @@ class dirstateguard(util.transactional):
         self._active = False
         self._closed = False
         self._backupname = 'dirstate.backup.%s.%d' % (name, id(self))
-        self._narrowspecbackupname = ('narrowspec.backup.%s.%d' %
-                                      (name, id(self)))
+        self._narrowspecbackupname = 'narrowspec.backup.%s.%d' % (
+            name,
+            id(self),
+        )
         repo.dirstate.savebackup(repo.currenttransaction(), self._backupname)
         narrowspec.savewcbackup(repo, self._narrowspecbackupname)
         self._active = True

     def __del__(self):
         if self._active: # still active
             # this may occur, even if this class is used correctly:
             # for example, releasing other resources like transaction
             # may raise exception before ``dirstateguard.release`` in
@@ -49,27 +52,33 @@ class dirstateguard(util.transactional):
             self._abort()

     def close(self):
         if not self._active: # already inactivated
-            msg = (_("can't close already inactivated backup: %s")
-                   % self._backupname)
+            msg = (
+                _("can't close already inactivated backup: %s")
+                % self._backupname
+            )
             raise error.Abort(msg)

-        self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
-                                        self._backupname)
+        self._repo.dirstate.clearbackup(
+            self._repo.currenttransaction(), self._backupname
+        )
         narrowspec.clearwcbackup(self._repo, self._narrowspecbackupname)
         self._active = False
         self._closed = True

     def _abort(self):
         narrowspec.restorewcbackup(self._repo, self._narrowspecbackupname)
-        self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
-                                          self._backupname)
+        self._repo.dirstate.restorebackup(
+            self._repo.currenttransaction(), self._backupname
+        )
         self._active = False

     def release(self):
         if not self._closed:
             if not self._active: # already inactivated
-                msg = (_("can't release already inactivated backup: %s")
-                       % self._backupname)
+                msg = (
+                    _("can't release already inactivated backup: %s")
+                    % self._backupname
+                )
                 raise error.Abort(msg)
             self._abort()
@@ -43,8 +43,9 @@ class httpsendfile(object):
         # requires authentication. Since we can't know until we try
         # once whether authentication will be required, just lie to
         # the user and maybe the push succeeds suddenly at 50%.
-        self._progress = ui.makeprogress(_('sending'), unit=_('kb'),
-                                         total=(self.length // 1024 * 2))
+        self._progress = ui.makeprogress(
+            _('sending'), unit=_('kb'), total=(self.length // 1024 * 2)
+        )

     def read(self, *args, **kwargs):
         ret = self._data.read(*args, **kwargs)
@@ -61,6 +62,7 @@ class httpsendfile(object):
     def __exit__(self, exc_type, exc_val, exc_tb):
         self.close()

+
 # moved here from url.py to avoid a cycle
 def readauthforuri(ui, uri, user):
     uri = pycompat.bytesurl(uri)
@@ -109,10 +111,18 @@ def readauthforuri(ui, uri, user):
             schemes, prefix = [p[0]], p[1]
         else:
             schemes = (auth.get('schemes') or 'https').split()
-        if ((prefix == '*' or hostpath.startswith(prefix)) and
-            (len(prefix) > bestlen or (len(prefix) == bestlen and
-                                       not bestuser and 'username' in auth))
-            and scheme in schemes):
+        if (
+            (prefix == '*' or hostpath.startswith(prefix))
+            and (
+                len(prefix) > bestlen
+                or (
+                    len(prefix) == bestlen
+                    and not bestuser
+                    and 'username' in auth
+                )
+            )
+            and scheme in schemes
+        ):
             bestlen = len(prefix)
             bestauth = group, auth
             bestuser = auth.get('username')
@@ -15,11 +15,13 @@ from . import (
     pycompat,
 )

+
 def _sizep(x):
     # i18n: "size" is a keyword
     expr = filesetlang.getstring(x, _("size requires an expression"))
     return fileset.sizematcher(expr)

+
 def _compile(tree):
     if not tree:
         raise error.ParseError(_("missing argument"))
@@ -28,20 +30,23 @@ def _compile(tree):
         return _compile(tree[1])
     elif op in {'symbol', 'string', 'kindpat'}:
         name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern'))
         if name.startswith('**'): # file extension test, ex. "**.tar.gz"
             ext = name[2:]
             for c in pycompat.bytestr(ext):
                 if c in '*{}[]?/\\':
                     raise error.ParseError(_('reserved character: %s') % c)
             return lambda n, s: n.endswith(ext)
         elif name.startswith('path:'): # directory or full path test
             p = name[5:] # prefix
             pl = len(p)
-            f = lambda n, s: n.startswith(p) and (len(n) == pl
-                                                  or n[pl:pl + 1] == '/')
+            f = lambda n, s: n.startswith(p) and (
+                len(n) == pl or n[pl : pl + 1] == '/'
+            )
             return f
-        raise error.ParseError(_("unsupported file pattern: %s") % name,
-                               hint=_('paths must be prefixed with "path:"'))
+        raise error.ParseError(
+            _("unsupported file pattern: %s") % name,
+            hint=_('paths must be prefixed with "path:"'),
+        )
     elif op in {'or', 'patterns'}:
         funcs = [_compile(x) for x in tree[1:]]
         return lambda n, s: any(f(n, s) for f in funcs)
@@ -63,15 +68,18 @@ def _compile(tree):
             return symbols[name]

         raise error.UnknownIdentifier(name, symbols.keys())
     elif op == 'minus': # equivalent to 'x and not y'
         func1 = _compile(tree[1])
         func2 = _compile(tree[2])
         return lambda n, s: func1(n, s) and not func2(n, s)
     elif op == 'list':
-        raise error.ParseError(_("can't use a list in this context"),
-                               hint=_('see \'hg help "filesets.x or y"\''))
+        raise error.ParseError(
+            _("can't use a list in this context"),
+            hint=_('see \'hg help "filesets.x or y"\''),
+        )
     raise error.ProgrammingError('illegal tree: %r' % (tree,))

+
 def compile(text):
     """generate a function (path, size) -> bool from filter specification.

@@ -20,6 +20,7 @@ def bin(s):
     except binascii.Error as e:
         raise TypeError(e)

+
 nullrev = -1
 # In hex, this is '0000000000000000000000000000000000000000'
 nullid = b"\0" * 20
@@ -38,10 +39,11 @@ wdirfilenodeids = {newnodeid, addednodei

 # pseudo identifiers for working directory
 # (they are experimental, so don't add too many dependencies on them)
-wdirrev = 0x7fffffff
+wdirrev = 0x7FFFFFFF
 # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff'
 wdirid = b"\xff" * 20
 wdirhex = hex(wdirid)

+
 def short(node):
     return hex(node[:6])
@@ -39,6 +39,7 @@ policy = b'allow'

 try:
     from . import __modulepolicy__
+
     policy = __modulepolicy__.modulepolicy
 except ImportError:
     pass
@@ -57,6 +58,7 @@ if sys.version_info[0] >= 3:
 else:
     policy = os.environ.get(r'HGMODULEPOLICY', policy)

+
 def _importfrom(pkgname, modname):
     # from .<pkgname> import <modname> (where . is looked through this module)
     fakelocals = {}
@@ -69,6 +71,7 @@ def _importfrom(pkgname, modname):
     getattr(mod, r'__doc__', None)
     return fakelocals[modname]

+
 # keep in sync with "version" in C modules
 _cextversions = {
     (r'cext', r'base85'): 1,
@@ -86,13 +89,17 @@ def _importfrom(pkgname, modname):
     (r'cffi', r'parsers'): (r'pure', r'parsers'),
 }

+
 def _checkmod(pkgname, modname, mod):
     expected = _cextversions.get((pkgname, modname))
     actual = getattr(mod, r'version', None)
     if actual != expected:
-        raise ImportError(r'cannot import module %s.%s '
-                          r'(expected version: %d, actual: %r)'
-                          % (pkgname, modname, expected, actual))
+        raise ImportError(
+            r'cannot import module %s.%s '
+            r'(expected version: %d, actual: %r)'
+            % (pkgname, modname, expected, actual)
+        )
+

 def importmod(modname):
     """Import module according to policy and check API version"""
@@ -114,10 +121,12 @@ def importmod(modname):
     pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname))
     return _importfrom(pn, mn)

+
 def _isrustpermissive():
     """Assuming the policy is a Rust one, tell if it's permissive."""
     return policy.endswith(b'-allow')

+
 def importrust(modname, member=None, default=None):
     """Import Rust module according to policy and availability.

@@ -14,6 +14,7 @@ from . import (
     phases,
 )

+
 def _nslist(repo):
     n = {}
     for k in _namespaces:
@@ -22,36 +23,45 @@ def _nslist(repo):
         n.pop('obsolete')
     return n

-_namespaces = {"namespaces": (lambda *x: False, _nslist),
-               "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
-               "phases": (phases.pushphase, phases.listphases),
-               "obsolete": (obsolete.pushmarker, obsolete.listmarkers),
-               }
+
+_namespaces = {
+    "namespaces": (lambda *x: False, _nslist),
+    "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
+    "phases": (phases.pushphase, phases.listphases),
+    "obsolete": (obsolete.pushmarker, obsolete.listmarkers),
+}
+

+
 def register(namespace, pushkey, listkeys):
     _namespaces[namespace] = (pushkey, listkeys)

+
 def _get(namespace):
     return _namespaces.get(namespace, (lambda *x: False, lambda *x: {}))

+
 def push(repo, namespace, key, old, new):
     '''should succeed iff value was old'''
     pk = _get(namespace)[0]
     return pk(repo, key, old, new)

+
 def list(repo, namespace):
     '''return a dict'''
     lk = _get(namespace)[1]
     return lk(repo)

+
 encode = encoding.fromlocal

 decode = encoding.tolocal

+
 def encodekeys(keys):
     """encode the content of a pushkey namespace for exchange over the wire"""
     return '\n'.join(['%s\t%s' % (encode(k), encode(v)) for k, v in keys])

+
 def decodekeys(data):
     """decode the content of a pushkey namespace from exchange over the wire"""
     result = {}
@@ -24,15 +24,18 @@ fallbackpager = scmplatform.fallbackpage
 systemrcpath = scmplatform.systemrcpath
 userrcpath = scmplatform.userrcpath

+
 def _expandrcpath(path):
     '''path could be a file or a directory. return a list of file paths'''
     p = util.expandpath(path)
     if os.path.isdir(p):
         join = os.path.join
-        return sorted(join(p, f) for f, k in util.listdir(p)
-                      if f.endswith('.rc'))
+        return sorted(
+            join(p, f) for f, k in util.listdir(p) if f.endswith('.rc')
+        )
     return [p]

+
 def envrcitems(env=None):
     '''Return [(section, name, value, source)] config items.

@@ -55,6 +58,7 @@ def envrcitems(env=None):
         result.append((section, configname, env[envname], '$%s' % envname))
     return result

+
 def defaultrcpath():
     '''return rc paths in default.d'''
     path = []
@@ -63,6 +67,7 @@ def defaultrcpath():
         path = _expandrcpath(defaultpath)
     return path

+
 def rccomponents():
     '''return an ordered [(type, obj)] about where to load configs.

@@ -92,6 +97,7 @@ def rccomponents():
     _rccomponents.extend(normpaths(userrcpath()))
     return _rccomponents

+
 def defaultpagerenv():
     '''return a dict of default environment variables and their values,
     intended to be set before starting a pager.
@@ -16,6 +16,7 @@ from . import (
     revset,
 )

+
 def precheck(repo, revs, action='rewrite'):
     """check if revs can be rewritten
     action is used to control the error message.
@@ -23,7 +24,7 @@ def precheck(repo, revs, action='rewrite
     Make sure this function is called after taking the lock.
     """
     if node.nullrev in revs:
-        msg = _("cannot %s null changeset") % (action)
+        msg = _("cannot %s null changeset") % action
         hint = _("no changeset checked out")
         raise error.Abort(msg, hint=hint)

@@ -32,7 +33,7 @@ def precheck(repo, revs, action='rewrite

     publicrevs = repo.revs('%ld and public()', revs)
     if publicrevs:
-        msg = _("cannot %s public changesets") % (action)
+        msg = _("cannot %s public changesets") % action
         hint = _("see 'hg help phases' for details")
         raise error.Abort(msg, hint=hint)

@@ -40,6 +41,7 @@ def precheck(repo, revs, action='rewrite
     if newunstable:
         raise error.Abort(_("cannot %s changeset with children") % action)

+
 def disallowednewunstable(repo, revs):
     """Checks whether editing the revs will create new unstable changesets and
     are we allowed to create them.
@@ -18,17 +18,23 @@ from . import (
 # 'less' as the default seems reasonable.
 fallbackpager = 'less'

+
 def _rcfiles(path):
     rcs = [os.path.join(path, 'hgrc')]
     rcdir = os.path.join(path, 'hgrc.d')
     try:
-        rcs.extend([os.path.join(rcdir, f)
-                    for f, kind in util.listdir(rcdir)
-                    if f.endswith(".rc")])
+        rcs.extend(
+            [
+                os.path.join(rcdir, f)
+                for f, kind in util.listdir(rcdir)
+                if f.endswith(".rc")
+            ]
+        )
     except OSError:
         pass
     return rcs

+
 def systemrcpath():
     path = []
     if pycompat.sysplatform == 'plan9':
@@ -43,6 +49,7 @@ def systemrcpath():
         path.extend(_rcfiles('/' + root))
     return path

+
 def userrcpath():
     if pycompat.sysplatform == 'plan9':
         return [encoding.environ['home'] + '/lib/hgrc']
@@ -53,12 +60,16 @@ def userrcpath():
     if confighome is None or not os.path.isabs(confighome):
         confighome = os.path.expanduser('~/.config')

-    return [os.path.expanduser('~/.hgrc'),
-            os.path.join(confighome, 'hg', 'hgrc')]
+    return [
+        os.path.expanduser('~/.hgrc'),
+        os.path.join(confighome, 'hg', 'hgrc'),
+    ]
+

 def termsize(ui):
     try:
         import termios
+
         TIOCGWINSZ = termios.TIOCGWINSZ # unavailable on IRIX (issue3449)
     except (AttributeError, ImportError):
         return 80, 24
@@ -11,6 +11,7 @@ from . import (

 try:
     import _winreg as winreg
+
     winreg.CloseKey
 except ImportError:
     import winreg
@@ -18,6 +19,7 @@ except ImportError:
 # MS-DOS 'more' is the only pager available by default on Windows.
 fallbackpager = 'more'

+
 def systemrcpath():
     '''return default os-specific hgrc search path'''
     rcpath = []
@@ -32,8 +34,9 @@ def systemrcpath():
             if f.endswith('.rc'):
                 rcpath.append(os.path.join(progrcd, f))
     # else look for a system rcpath in the registry
-    value = util.lookupreg('SOFTWARE\\Mercurial', None,
-                           winreg.HKEY_LOCAL_MACHINE)
+    value = util.lookupreg(
+        'SOFTWARE\\Mercurial', None, winreg.HKEY_LOCAL_MACHINE
+    )
     if not isinstance(value, str) or not value:
         return rcpath
     value = util.localpath(value)
@@ -46,16 +49,17 @@ def systemrcpath():
             rcpath.append(os.path.join(p, f))
     return rcpath

+
 def userrcpath():
     '''return os-specific hgrc search path to the user dir'''
     home = os.path.expanduser('~')
-    path = [os.path.join(home, 'mercurial.ini'),
-            os.path.join(home, '.hgrc')]
+    path = [os.path.join(home, 'mercurial.ini'), os.path.join(home, '.hgrc')]
     userprofile = encoding.environ.get('USERPROFILE')
     if userprofile and userprofile != home:
         path.append(os.path.join(userprofile, 'mercurial.ini'))
         path.append(os.path.join(userprofile, '.hgrc'))
     return path

+
 def termsize(ui):
     return win32.termsize()
@@ -7,6 +7,7 @@

 from __future__ import absolute_import

+
 def getstack(repo, rev=None):
     """return a sorted smartrev of the stack containing either rev if it is
     not None or the current working directory parent.