@@ -1,749 +1,812 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 |
|
2 | |||
3 | from __future__ import absolute_import, print_function |
|
3 | from __future__ import absolute_import, print_function | |
4 |
|
4 | |||
5 | import ast |
|
5 | import ast | |
6 | import collections |
|
6 | import collections | |
7 | import os |
|
7 | import os | |
8 | import sys |
|
8 | import sys | |
9 |
|
9 | |||
10 | # Import a minimal set of stdlib modules needed for list_stdlib_modules() |
|
10 | # Import a minimal set of stdlib modules needed for list_stdlib_modules() | |
11 | # to work when run from a virtualenv. The modules were chosen empirically |
|
11 | # to work when run from a virtualenv. The modules were chosen empirically | |
12 | # so that the return value matches the return value without virtualenv. |
|
12 | # so that the return value matches the return value without virtualenv. | |
13 | if True: # disable lexical sorting checks |
|
13 | if True: # disable lexical sorting checks | |
14 | try: |
|
14 | try: | |
15 | import BaseHTTPServer as basehttpserver |
|
15 | import BaseHTTPServer as basehttpserver | |
16 | except ImportError: |
|
16 | except ImportError: | |
17 | basehttpserver = None |
|
17 | basehttpserver = None | |
18 | import zlib |
|
18 | import zlib | |
19 |
|
19 | |||
20 | import testparseutil |
|
20 | import testparseutil | |
21 |
|
21 | |||
22 | # Whitelist of modules that symbols can be directly imported from. |
|
22 | # Whitelist of modules that symbols can be directly imported from. | |
23 | allowsymbolimports = ( |
|
23 | allowsymbolimports = ( | |
24 | '__future__', |
|
24 | '__future__', | |
25 | 'bzrlib', |
|
25 | 'bzrlib', | |
26 | 'hgclient', |
|
26 | 'hgclient', | |
27 | 'mercurial', |
|
27 | 'mercurial', | |
28 | 'mercurial.hgweb.common', |
|
28 | 'mercurial.hgweb.common', | |
29 | 'mercurial.hgweb.request', |
|
29 | 'mercurial.hgweb.request', | |
30 | 'mercurial.i18n', |
|
30 | 'mercurial.i18n', | |
31 | 'mercurial.interfaces', |
|
31 | 'mercurial.interfaces', | |
32 | 'mercurial.node', |
|
32 | 'mercurial.node', | |
33 | # for revlog to re-export constant to extensions |
|
33 | # for revlog to re-export constant to extensions | |
34 | 'mercurial.revlogutils.constants', |
|
34 | 'mercurial.revlogutils.constants', | |
35 | 'mercurial.revlogutils.flagutil', |
|
35 | 'mercurial.revlogutils.flagutil', | |
36 | # for cffi modules to re-export pure functions |
|
36 | # for cffi modules to re-export pure functions | |
37 | 'mercurial.pure.base85', |
|
37 | 'mercurial.pure.base85', | |
38 | 'mercurial.pure.bdiff', |
|
38 | 'mercurial.pure.bdiff', | |
39 | 'mercurial.pure.mpatch', |
|
39 | 'mercurial.pure.mpatch', | |
40 | 'mercurial.pure.osutil', |
|
40 | 'mercurial.pure.osutil', | |
41 | 'mercurial.pure.parsers', |
|
41 | 'mercurial.pure.parsers', | |
42 | # third-party imports should be directly imported |
|
42 | # third-party imports should be directly imported | |
43 | 'mercurial.thirdparty', |
|
43 | 'mercurial.thirdparty', | |
44 | 'mercurial.thirdparty.attr', |
|
44 | 'mercurial.thirdparty.attr', | |
45 | 'mercurial.thirdparty.zope', |
|
45 | 'mercurial.thirdparty.zope', | |
46 | 'mercurial.thirdparty.zope.interface', |
|
46 | 'mercurial.thirdparty.zope.interface', | |
47 | ) |
|
47 | ) | |
48 |
|
48 | |||
49 | # Whitelist of symbols that can be directly imported. |
|
49 | # Whitelist of symbols that can be directly imported. | |
50 | directsymbols = ( |
|
50 | directsymbols = ('demandimport',) | |
51 | 'demandimport', |
|
|||
52 | ) |
|
|||
53 |
|
51 | |||
54 | # Modules that must be aliased because they are commonly confused with |
|
52 | # Modules that must be aliased because they are commonly confused with | |
55 | # common variables and can create aliasing and readability issues. |
|
53 | # common variables and can create aliasing and readability issues. | |
56 | requirealias = { |
|
54 | requirealias = { | |
57 | 'ui': 'uimod', |
|
55 | 'ui': 'uimod', | |
58 | } |
|
56 | } | |
59 |
|
57 | |||
|
58 | ||||
60 | def usingabsolute(root): |
|
59 | def usingabsolute(root): | |
61 | """Whether absolute imports are being used.""" |
|
60 | """Whether absolute imports are being used.""" | |
62 | if sys.version_info[0] >= 3: |
|
61 | if sys.version_info[0] >= 3: | |
63 | return True |
|
62 | return True | |
64 |
|
63 | |||
65 | for node in ast.walk(root): |
|
64 | for node in ast.walk(root): | |
66 | if isinstance(node, ast.ImportFrom): |
|
65 | if isinstance(node, ast.ImportFrom): | |
67 | if node.module == '__future__': |
|
66 | if node.module == '__future__': | |
68 | for n in node.names: |
|
67 | for n in node.names: | |
69 | if n.name == 'absolute_import': |
|
68 | if n.name == 'absolute_import': | |
70 | return True |
|
69 | return True | |
71 |
|
70 | |||
72 | return False |
|
71 | return False | |
73 |
|
72 | |||
|
73 | ||||
74 | def walklocal(root): |
|
74 | def walklocal(root): | |
75 | """Recursively yield all descendant nodes but not in a different scope""" |
|
75 | """Recursively yield all descendant nodes but not in a different scope""" | |
76 | todo = collections.deque(ast.iter_child_nodes(root)) |
|
76 | todo = collections.deque(ast.iter_child_nodes(root)) | |
77 | yield root, False |
|
77 | yield root, False | |
78 | while todo: |
|
78 | while todo: | |
79 | node = todo.popleft() |
|
79 | node = todo.popleft() | |
80 | newscope = isinstance(node, ast.FunctionDef) |
|
80 | newscope = isinstance(node, ast.FunctionDef) | |
81 | if not newscope: |
|
81 | if not newscope: | |
82 | todo.extend(ast.iter_child_nodes(node)) |
|
82 | todo.extend(ast.iter_child_nodes(node)) | |
83 | yield node, newscope |
|
83 | yield node, newscope | |
84 |
|
84 | |||
|
85 | ||||
85 | def dotted_name_of_path(path): |
|
86 | def dotted_name_of_path(path): | |
86 | """Given a relative path to a source file, return its dotted module name. |
|
87 | """Given a relative path to a source file, return its dotted module name. | |
87 |
|
88 | |||
88 | >>> dotted_name_of_path('mercurial/error.py') |
|
89 | >>> dotted_name_of_path('mercurial/error.py') | |
89 | 'mercurial.error' |
|
90 | 'mercurial.error' | |
90 | >>> dotted_name_of_path('zlibmodule.so') |
|
91 | >>> dotted_name_of_path('zlibmodule.so') | |
91 | 'zlib' |
|
92 | 'zlib' | |
92 | """ |
|
93 | """ | |
93 | parts = path.replace(os.sep, '/').split('/') |
|
94 | parts = path.replace(os.sep, '/').split('/') | |
94 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so |
|
95 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so | |
95 | if parts[-1].endswith('module'): |
|
96 | if parts[-1].endswith('module'): | |
96 | parts[-1] = parts[-1][:-6] |
|
97 | parts[-1] = parts[-1][:-6] | |
97 | return '.'.join(parts) |
|
98 | return '.'.join(parts) | |
98 |
|
99 | |||
|
100 | ||||
99 | def fromlocalfunc(modulename, localmods): |
|
101 | def fromlocalfunc(modulename, localmods): | |
100 | """Get a function to examine which locally defined module the |
|
102 | """Get a function to examine which locally defined module the | |
101 | target source imports via a specified name. |
|
103 | target source imports via a specified name. | |
102 |
|
104 | |||
103 | `modulename` is an `dotted_name_of_path()`-ed source file path, |
|
105 | `modulename` is an `dotted_name_of_path()`-ed source file path, | |
104 | which may have `.__init__` at the end of it, of the target source. |
|
106 | which may have `.__init__` at the end of it, of the target source. | |
105 |
|
107 | |||
106 | `localmods` is a set of absolute `dotted_name_of_path()`-ed source file |
|
108 | `localmods` is a set of absolute `dotted_name_of_path()`-ed source file | |
107 | paths of locally defined (= Mercurial specific) modules. |
|
109 | paths of locally defined (= Mercurial specific) modules. | |
108 |
|
110 | |||
109 | This function assumes that module names not existing in |
|
111 | This function assumes that module names not existing in | |
110 | `localmods` are from the Python standard library. |
|
112 | `localmods` are from the Python standard library. | |
111 |
|
113 | |||
112 | This function returns the function, which takes `name` argument, |
|
114 | This function returns the function, which takes `name` argument, | |
113 | and returns `(absname, dottedpath, hassubmod)` tuple if `name` |
|
115 | and returns `(absname, dottedpath, hassubmod)` tuple if `name` | |
114 | matches against locally defined module. Otherwise, it returns |
|
116 | matches against locally defined module. Otherwise, it returns | |
115 | False. |
|
117 | False. | |
116 |
|
118 | |||
117 | It is assumed that `name` doesn't have `.__init__`. |
|
119 | It is assumed that `name` doesn't have `.__init__`. | |
118 |
|
120 | |||
119 | `absname` is an absolute module name of specified `name` |
|
121 | `absname` is an absolute module name of specified `name` | |
120 | (e.g. "hgext.convert"). This can be used to compose prefix for sub |
|
122 | (e.g. "hgext.convert"). This can be used to compose prefix for sub | |
121 | modules or so. |
|
123 | modules or so. | |
122 |
|
124 | |||
123 | `dottedpath` is a `dotted_name_of_path()`-ed source file path |
|
125 | `dottedpath` is a `dotted_name_of_path()`-ed source file path | |
124 | (e.g. "hgext.convert.__init__") of `name`. This is used to look |
|
126 | (e.g. "hgext.convert.__init__") of `name`. This is used to look | |
125 | module up in `localmods` again. |
|
127 | module up in `localmods` again. | |
126 |
|
128 | |||
127 | `hassubmod` is whether it may have sub modules under it (for |
|
129 | `hassubmod` is whether it may have sub modules under it (for | |
128 | convenient, even though this is also equivalent to "absname != |
|
130 | convenient, even though this is also equivalent to "absname != | |
129 | dottednpath") |
|
131 | dottednpath") | |
130 |
|
132 | |||
131 | >>> localmods = {'foo.__init__', 'foo.foo1', |
|
133 | >>> localmods = {'foo.__init__', 'foo.foo1', | |
132 | ... 'foo.bar.__init__', 'foo.bar.bar1', |
|
134 | ... 'foo.bar.__init__', 'foo.bar.bar1', | |
133 | ... 'baz.__init__', 'baz.baz1'} |
|
135 | ... 'baz.__init__', 'baz.baz1'} | |
134 | >>> fromlocal = fromlocalfunc('foo.xxx', localmods) |
|
136 | >>> fromlocal = fromlocalfunc('foo.xxx', localmods) | |
135 | >>> # relative |
|
137 | >>> # relative | |
136 | >>> fromlocal('foo1') |
|
138 | >>> fromlocal('foo1') | |
137 | ('foo.foo1', 'foo.foo1', False) |
|
139 | ('foo.foo1', 'foo.foo1', False) | |
138 | >>> fromlocal('bar') |
|
140 | >>> fromlocal('bar') | |
139 | ('foo.bar', 'foo.bar.__init__', True) |
|
141 | ('foo.bar', 'foo.bar.__init__', True) | |
140 | >>> fromlocal('bar.bar1') |
|
142 | >>> fromlocal('bar.bar1') | |
141 | ('foo.bar.bar1', 'foo.bar.bar1', False) |
|
143 | ('foo.bar.bar1', 'foo.bar.bar1', False) | |
142 | >>> # absolute |
|
144 | >>> # absolute | |
143 | >>> fromlocal('baz') |
|
145 | >>> fromlocal('baz') | |
144 | ('baz', 'baz.__init__', True) |
|
146 | ('baz', 'baz.__init__', True) | |
145 | >>> fromlocal('baz.baz1') |
|
147 | >>> fromlocal('baz.baz1') | |
146 | ('baz.baz1', 'baz.baz1', False) |
|
148 | ('baz.baz1', 'baz.baz1', False) | |
147 | >>> # unknown = maybe standard library |
|
149 | >>> # unknown = maybe standard library | |
148 | >>> fromlocal('os') |
|
150 | >>> fromlocal('os') | |
149 | False |
|
151 | False | |
150 | >>> fromlocal(None, 1) |
|
152 | >>> fromlocal(None, 1) | |
151 | ('foo', 'foo.__init__', True) |
|
153 | ('foo', 'foo.__init__', True) | |
152 | >>> fromlocal('foo1', 1) |
|
154 | >>> fromlocal('foo1', 1) | |
153 | ('foo.foo1', 'foo.foo1', False) |
|
155 | ('foo.foo1', 'foo.foo1', False) | |
154 | >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods) |
|
156 | >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods) | |
155 | >>> fromlocal2(None, 2) |
|
157 | >>> fromlocal2(None, 2) | |
156 | ('foo', 'foo.__init__', True) |
|
158 | ('foo', 'foo.__init__', True) | |
157 | >>> fromlocal2('bar2', 1) |
|
159 | >>> fromlocal2('bar2', 1) | |
158 | False |
|
160 | False | |
159 | >>> fromlocal2('bar', 2) |
|
161 | >>> fromlocal2('bar', 2) | |
160 | ('foo.bar', 'foo.bar.__init__', True) |
|
162 | ('foo.bar', 'foo.bar.__init__', True) | |
161 | """ |
|
163 | """ | |
162 | if not isinstance(modulename, str): |
|
164 | if not isinstance(modulename, str): | |
163 | modulename = modulename.decode('ascii') |
|
165 | modulename = modulename.decode('ascii') | |
164 | prefix = '.'.join(modulename.split('.')[:-1]) |
|
166 | prefix = '.'.join(modulename.split('.')[:-1]) | |
165 | if prefix: |
|
167 | if prefix: | |
166 | prefix += '.' |
|
168 | prefix += '.' | |
|
169 | ||||
167 | def fromlocal(name, level=0): |
|
170 | def fromlocal(name, level=0): | |
168 | # name is false value when relative imports are used. |
|
171 | # name is false value when relative imports are used. | |
169 | if not name: |
|
172 | if not name: | |
170 | # If relative imports are used, level must not be absolute. |
|
173 | # If relative imports are used, level must not be absolute. | |
171 | assert level > 0 |
|
174 | assert level > 0 | |
172 | candidates = ['.'.join(modulename.split('.')[:-level])] |
|
175 | candidates = ['.'.join(modulename.split('.')[:-level])] | |
173 | else: |
|
176 | else: | |
174 | if not level: |
|
177 | if not level: | |
175 | # Check relative name first. |
|
178 | # Check relative name first. | |
176 | candidates = [prefix + name, name] |
|
179 | candidates = [prefix + name, name] | |
177 | else: |
|
180 | else: | |
178 | candidates = ['.'.join(modulename.split('.')[:-level]) + |
|
181 | candidates = [ | |
179 | '.' + name] |
|
182 | '.'.join(modulename.split('.')[:-level]) + '.' + name | |
|
183 | ] | |||
180 |
|
184 | |||
181 | for n in candidates: |
|
185 | for n in candidates: | |
182 | if n in localmods: |
|
186 | if n in localmods: | |
183 | return (n, n, False) |
|
187 | return (n, n, False) | |
184 | dottedpath = n + '.__init__' |
|
188 | dottedpath = n + '.__init__' | |
185 | if dottedpath in localmods: |
|
189 | if dottedpath in localmods: | |
186 | return (n, dottedpath, True) |
|
190 | return (n, dottedpath, True) | |
187 | return False |
|
191 | return False | |
|
192 | ||||
188 | return fromlocal |
|
193 | return fromlocal | |
189 |
|
194 | |||
|
195 | ||||
190 | def populateextmods(localmods): |
|
196 | def populateextmods(localmods): | |
191 | """Populate C extension modules based on pure modules""" |
|
197 | """Populate C extension modules based on pure modules""" | |
192 | newlocalmods = set(localmods) |
|
198 | newlocalmods = set(localmods) | |
193 | for n in localmods: |
|
199 | for n in localmods: | |
194 | if n.startswith('mercurial.pure.'): |
|
200 | if n.startswith('mercurial.pure.'): | |
195 | m = n[len('mercurial.pure.'):] |
|
201 | m = n[len('mercurial.pure.') :] | |
196 | newlocalmods.add('mercurial.cext.' + m) |
|
202 | newlocalmods.add('mercurial.cext.' + m) | |
197 | newlocalmods.add('mercurial.cffi._' + m) |
|
203 | newlocalmods.add('mercurial.cffi._' + m) | |
198 | return newlocalmods |
|
204 | return newlocalmods | |
199 |
|
205 | |||
|
206 | ||||
200 | def list_stdlib_modules(): |
|
207 | def list_stdlib_modules(): | |
201 | """List the modules present in the stdlib. |
|
208 | """List the modules present in the stdlib. | |
202 |
|
209 | |||
203 | >>> py3 = sys.version_info[0] >= 3 |
|
210 | >>> py3 = sys.version_info[0] >= 3 | |
204 | >>> mods = set(list_stdlib_modules()) |
|
211 | >>> mods = set(list_stdlib_modules()) | |
205 | >>> 'BaseHTTPServer' in mods or py3 |
|
212 | >>> 'BaseHTTPServer' in mods or py3 | |
206 | True |
|
213 | True | |
207 |
|
214 | |||
208 | os.path isn't really a module, so it's missing: |
|
215 | os.path isn't really a module, so it's missing: | |
209 |
|
216 | |||
210 | >>> 'os.path' in mods |
|
217 | >>> 'os.path' in mods | |
211 | False |
|
218 | False | |
212 |
|
219 | |||
213 | sys requires special treatment, because it's baked into the |
|
220 | sys requires special treatment, because it's baked into the | |
214 | interpreter, but it should still appear: |
|
221 | interpreter, but it should still appear: | |
215 |
|
222 | |||
216 | >>> 'sys' in mods |
|
223 | >>> 'sys' in mods | |
217 | True |
|
224 | True | |
218 |
|
225 | |||
219 | >>> 'collections' in mods |
|
226 | >>> 'collections' in mods | |
220 | True |
|
227 | True | |
221 |
|
228 | |||
222 | >>> 'cStringIO' in mods or py3 |
|
229 | >>> 'cStringIO' in mods or py3 | |
223 | True |
|
230 | True | |
224 |
|
231 | |||
225 | >>> 'cffi' in mods |
|
232 | >>> 'cffi' in mods | |
226 | True |
|
233 | True | |
227 | """ |
|
234 | """ | |
228 | for m in sys.builtin_module_names: |
|
235 | for m in sys.builtin_module_names: | |
229 | yield m |
|
236 | yield m | |
230 | # These modules only exist on windows, but we should always |
|
237 | # These modules only exist on windows, but we should always | |
231 | # consider them stdlib. |
|
238 | # consider them stdlib. | |
232 | for m in ['msvcrt', '_winreg']: |
|
239 | for m in ['msvcrt', '_winreg']: | |
233 | yield m |
|
240 | yield m | |
234 | yield '__builtin__' |
|
241 | yield '__builtin__' | |
235 | yield 'builtins' # python3 only |
|
242 | yield 'builtins' # python3 only | |
236 | yield 'importlib.abc' # python3 only |
|
243 | yield 'importlib.abc' # python3 only | |
237 | yield 'importlib.machinery' # python3 only |
|
244 | yield 'importlib.machinery' # python3 only | |
238 | yield 'importlib.util' # python3 only |
|
245 | yield 'importlib.util' # python3 only | |
239 | for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only |
|
246 | for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only | |
240 | yield m |
|
247 | yield m | |
241 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy |
|
248 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy | |
242 | yield m |
|
249 | yield m | |
243 | for m in ['cffi']: |
|
250 | for m in ['cffi']: | |
244 | yield m |
|
251 | yield m | |
245 | stdlib_prefixes = {sys.prefix, sys.exec_prefix} |
|
252 | stdlib_prefixes = {sys.prefix, sys.exec_prefix} | |
246 | # We need to supplement the list of prefixes for the search to work |
|
253 | # We need to supplement the list of prefixes for the search to work | |
247 | # when run from within a virtualenv. |
|
254 | # when run from within a virtualenv. | |
248 | for mod in (basehttpserver, zlib): |
|
255 | for mod in (basehttpserver, zlib): | |
249 | if mod is None: |
|
256 | if mod is None: | |
250 | continue |
|
257 | continue | |
251 | try: |
|
258 | try: | |
252 | # Not all module objects have a __file__ attribute. |
|
259 | # Not all module objects have a __file__ attribute. | |
253 | filename = mod.__file__ |
|
260 | filename = mod.__file__ | |
254 | except AttributeError: |
|
261 | except AttributeError: | |
255 | continue |
|
262 | continue | |
256 | dirname = os.path.dirname(filename) |
|
263 | dirname = os.path.dirname(filename) | |
257 | for prefix in stdlib_prefixes: |
|
264 | for prefix in stdlib_prefixes: | |
258 | if dirname.startswith(prefix): |
|
265 | if dirname.startswith(prefix): | |
259 | # Then this directory is redundant. |
|
266 | # Then this directory is redundant. | |
260 | break |
|
267 | break | |
261 | else: |
|
268 | else: | |
262 | stdlib_prefixes.add(dirname) |
|
269 | stdlib_prefixes.add(dirname) | |
263 | sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |
|
270 | sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | |
264 | for libpath in sys.path: |
|
271 | for libpath in sys.path: | |
265 | # We want to walk everything in sys.path that starts with something in |
|
272 | # We want to walk everything in sys.path that starts with something in | |
266 | # stdlib_prefixes, but not directories from the hg sources. |
|
273 | # stdlib_prefixes, but not directories from the hg sources. | |
267 |
if |
|
274 | if os.path.abspath(libpath).startswith(sourceroot) or not any( | |
268 |
|
|
275 | libpath.startswith(p) for p in stdlib_prefixes | |
|
276 | ): | |||
269 | continue |
|
277 | continue | |
270 | for top, dirs, files in os.walk(libpath): |
|
278 | for top, dirs, files in os.walk(libpath): | |
271 | for i, d in reversed(list(enumerate(dirs))): |
|
279 | for i, d in reversed(list(enumerate(dirs))): | |
272 | if (not os.path.exists(os.path.join(top, d, '__init__.py')) |
|
280 | if ( | |
273 | or top == libpath and d in ('hgdemandimport', 'hgext', |
|
281 | not os.path.exists(os.path.join(top, d, '__init__.py')) | |
274 | 'mercurial')): |
|
282 | or top == libpath | |
|
283 | and d in ('hgdemandimport', 'hgext', 'mercurial') | |||
|
284 | ): | |||
275 | del dirs[i] |
|
285 | del dirs[i] | |
276 | for name in files: |
|
286 | for name in files: | |
277 | if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): |
|
287 | if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): | |
278 | continue |
|
288 | continue | |
279 | if name.startswith('__init__.py'): |
|
289 | if name.startswith('__init__.py'): | |
280 | full_path = top |
|
290 | full_path = top | |
281 | else: |
|
291 | else: | |
282 | full_path = os.path.join(top, name) |
|
292 | full_path = os.path.join(top, name) | |
283 | rel_path = full_path[len(libpath) + 1:] |
|
293 | rel_path = full_path[len(libpath) + 1 :] | |
284 | mod = dotted_name_of_path(rel_path) |
|
294 | mod = dotted_name_of_path(rel_path) | |
285 | yield mod |
|
295 | yield mod | |
286 |
|
296 | |||
|
297 | ||||
287 | stdlib_modules = set(list_stdlib_modules()) |
|
298 | stdlib_modules = set(list_stdlib_modules()) | |
288 |
|
299 | |||
|
300 | ||||
289 | def imported_modules(source, modulename, f, localmods, ignore_nested=False): |
|
301 | def imported_modules(source, modulename, f, localmods, ignore_nested=False): | |
290 | """Given the source of a file as a string, yield the names |
|
302 | """Given the source of a file as a string, yield the names | |
291 | imported by that file. |
|
303 | imported by that file. | |
292 |
|
304 | |||
293 | Args: |
|
305 | Args: | |
294 | source: The python source to examine as a string. |
|
306 | source: The python source to examine as a string. | |
295 | modulename: of specified python source (may have `__init__`) |
|
307 | modulename: of specified python source (may have `__init__`) | |
296 | localmods: set of locally defined module names (may have `__init__`) |
|
308 | localmods: set of locally defined module names (may have `__init__`) | |
297 | ignore_nested: If true, import statements that do not start in |
|
309 | ignore_nested: If true, import statements that do not start in | |
298 | column zero will be ignored. |
|
310 | column zero will be ignored. | |
299 |
|
311 | |||
300 | Returns: |
|
312 | Returns: | |
301 | A list of absolute module names imported by the given source. |
|
313 | A list of absolute module names imported by the given source. | |
302 |
|
314 | |||
303 | >>> f = 'foo/xxx.py' |
|
315 | >>> f = 'foo/xxx.py' | |
304 | >>> modulename = 'foo.xxx' |
|
316 | >>> modulename = 'foo.xxx' | |
305 | >>> localmods = {'foo.__init__': True, |
|
317 | >>> localmods = {'foo.__init__': True, | |
306 | ... 'foo.foo1': True, 'foo.foo2': True, |
|
318 | ... 'foo.foo1': True, 'foo.foo2': True, | |
307 | ... 'foo.bar.__init__': True, 'foo.bar.bar1': True, |
|
319 | ... 'foo.bar.__init__': True, 'foo.bar.bar1': True, | |
308 | ... 'baz.__init__': True, 'baz.baz1': True } |
|
320 | ... 'baz.__init__': True, 'baz.baz1': True } | |
309 | >>> # standard library (= not locally defined ones) |
|
321 | >>> # standard library (= not locally defined ones) | |
310 | >>> sorted(imported_modules( |
|
322 | >>> sorted(imported_modules( | |
311 | ... 'from stdlib1 import foo, bar; import stdlib2', |
|
323 | ... 'from stdlib1 import foo, bar; import stdlib2', | |
312 | ... modulename, f, localmods)) |
|
324 | ... modulename, f, localmods)) | |
313 | [] |
|
325 | [] | |
314 | >>> # relative importing |
|
326 | >>> # relative importing | |
315 | >>> sorted(imported_modules( |
|
327 | >>> sorted(imported_modules( | |
316 | ... 'import foo1; from bar import bar1', |
|
328 | ... 'import foo1; from bar import bar1', | |
317 | ... modulename, f, localmods)) |
|
329 | ... modulename, f, localmods)) | |
318 | ['foo.bar.bar1', 'foo.foo1'] |
|
330 | ['foo.bar.bar1', 'foo.foo1'] | |
319 | >>> sorted(imported_modules( |
|
331 | >>> sorted(imported_modules( | |
320 | ... 'from bar.bar1 import name1, name2, name3', |
|
332 | ... 'from bar.bar1 import name1, name2, name3', | |
321 | ... modulename, f, localmods)) |
|
333 | ... modulename, f, localmods)) | |
322 | ['foo.bar.bar1'] |
|
334 | ['foo.bar.bar1'] | |
323 | >>> # absolute importing |
|
335 | >>> # absolute importing | |
324 | >>> sorted(imported_modules( |
|
336 | >>> sorted(imported_modules( | |
325 | ... 'from baz import baz1, name1', |
|
337 | ... 'from baz import baz1, name1', | |
326 | ... modulename, f, localmods)) |
|
338 | ... modulename, f, localmods)) | |
327 | ['baz.__init__', 'baz.baz1'] |
|
339 | ['baz.__init__', 'baz.baz1'] | |
328 | >>> # mixed importing, even though it shouldn't be recommended |
|
340 | >>> # mixed importing, even though it shouldn't be recommended | |
329 | >>> sorted(imported_modules( |
|
341 | >>> sorted(imported_modules( | |
330 | ... 'import stdlib, foo1, baz', |
|
342 | ... 'import stdlib, foo1, baz', | |
331 | ... modulename, f, localmods)) |
|
343 | ... modulename, f, localmods)) | |
332 | ['baz.__init__', 'foo.foo1'] |
|
344 | ['baz.__init__', 'foo.foo1'] | |
333 | >>> # ignore_nested |
|
345 | >>> # ignore_nested | |
334 | >>> sorted(imported_modules( |
|
346 | >>> sorted(imported_modules( | |
335 | ... '''import foo |
|
347 | ... '''import foo | |
336 | ... def wat(): |
|
348 | ... def wat(): | |
337 | ... import bar |
|
349 | ... import bar | |
338 | ... ''', modulename, f, localmods)) |
|
350 | ... ''', modulename, f, localmods)) | |
339 | ['foo.__init__', 'foo.bar.__init__'] |
|
351 | ['foo.__init__', 'foo.bar.__init__'] | |
340 | >>> sorted(imported_modules( |
|
352 | >>> sorted(imported_modules( | |
341 | ... '''import foo |
|
353 | ... '''import foo | |
342 | ... def wat(): |
|
354 | ... def wat(): | |
343 | ... import bar |
|
355 | ... import bar | |
344 | ... ''', modulename, f, localmods, ignore_nested=True)) |
|
356 | ... ''', modulename, f, localmods, ignore_nested=True)) | |
345 | ['foo.__init__'] |
|
357 | ['foo.__init__'] | |
346 | """ |
|
358 | """ | |
347 | fromlocal = fromlocalfunc(modulename, localmods) |
|
359 | fromlocal = fromlocalfunc(modulename, localmods) | |
348 | for node in ast.walk(ast.parse(source, f)): |
|
360 | for node in ast.walk(ast.parse(source, f)): | |
349 | if ignore_nested and getattr(node, 'col_offset', 0) > 0: |
|
361 | if ignore_nested and getattr(node, 'col_offset', 0) > 0: | |
350 | continue |
|
362 | continue | |
351 | if isinstance(node, ast.Import): |
|
363 | if isinstance(node, ast.Import): | |
352 | for n in node.names: |
|
364 | for n in node.names: | |
353 | found = fromlocal(n.name) |
|
365 | found = fromlocal(n.name) | |
354 | if not found: |
|
366 | if not found: | |
355 | # this should import standard library |
|
367 | # this should import standard library | |
356 | continue |
|
368 | continue | |
357 | yield found[1] |
|
369 | yield found[1] | |
358 | elif isinstance(node, ast.ImportFrom): |
|
370 | elif isinstance(node, ast.ImportFrom): | |
359 | found = fromlocal(node.module, node.level) |
|
371 | found = fromlocal(node.module, node.level) | |
360 | if not found: |
|
372 | if not found: | |
361 | # this should import standard library |
|
373 | # this should import standard library | |
362 | continue |
|
374 | continue | |
363 |
|
375 | |||
364 | absname, dottedpath, hassubmod = found |
|
376 | absname, dottedpath, hassubmod = found | |
365 | if not hassubmod: |
|
377 | if not hassubmod: | |
366 | # "dottedpath" is not a package; must be imported |
|
378 | # "dottedpath" is not a package; must be imported | |
367 | yield dottedpath |
|
379 | yield dottedpath | |
368 | # examination of "node.names" should be redundant |
|
380 | # examination of "node.names" should be redundant | |
369 | # e.g.: from mercurial.node import nullid, nullrev |
|
381 | # e.g.: from mercurial.node import nullid, nullrev | |
370 | continue |
|
382 | continue | |
371 |
|
383 | |||
372 | modnotfound = False |
|
384 | modnotfound = False | |
373 | prefix = absname + '.' |
|
385 | prefix = absname + '.' | |
374 | for n in node.names: |
|
386 | for n in node.names: | |
375 | found = fromlocal(prefix + n.name) |
|
387 | found = fromlocal(prefix + n.name) | |
376 | if not found: |
|
388 | if not found: | |
377 | # this should be a function or a property of "node.module" |
|
389 | # this should be a function or a property of "node.module" | |
378 | modnotfound = True |
|
390 | modnotfound = True | |
379 | continue |
|
391 | continue | |
380 | yield found[1] |
|
392 | yield found[1] | |
381 | if modnotfound: |
|
393 | if modnotfound: | |
382 | # "dottedpath" is a package, but imported because of non-module |
|
394 | # "dottedpath" is a package, but imported because of non-module | |
383 | # lookup |
|
395 | # lookup | |
384 | yield dottedpath |
|
396 | yield dottedpath | |
385 |
|
397 | |||
|
398 | ||||
386 | def verify_import_convention(module, source, localmods): |
|
399 | def verify_import_convention(module, source, localmods): | |
387 | """Verify imports match our established coding convention. |
|
400 | """Verify imports match our established coding convention. | |
388 |
|
401 | |||
389 | We have 2 conventions: legacy and modern. The modern convention is in |
|
402 | We have 2 conventions: legacy and modern. The modern convention is in | |
390 | effect when using absolute imports. |
|
403 | effect when using absolute imports. | |
391 |
|
404 | |||
392 | The legacy convention only looks for mixed imports. The modern convention |
|
405 | The legacy convention only looks for mixed imports. The modern convention | |
393 | is much more thorough. |
|
406 | is much more thorough. | |
394 | """ |
|
407 | """ | |
395 | root = ast.parse(source) |
|
408 | root = ast.parse(source) | |
396 | absolute = usingabsolute(root) |
|
409 | absolute = usingabsolute(root) | |
397 |
|
410 | |||
398 | if absolute: |
|
411 | if absolute: | |
399 | return verify_modern_convention(module, root, localmods) |
|
412 | return verify_modern_convention(module, root, localmods) | |
400 | else: |
|
413 | else: | |
401 | return verify_stdlib_on_own_line(root) |
|
414 | return verify_stdlib_on_own_line(root) | |
402 |
|
415 | |||
|
416 | ||||
403 | def verify_modern_convention(module, root, localmods, root_col_offset=0): |
|
417 | def verify_modern_convention(module, root, localmods, root_col_offset=0): | |
404 | """Verify a file conforms to the modern import convention rules. |
|
418 | """Verify a file conforms to the modern import convention rules. | |
405 |
|
419 | |||
406 | The rules of the modern convention are: |
|
420 | The rules of the modern convention are: | |
407 |
|
421 | |||
408 | * Ordering is stdlib followed by local imports. Each group is lexically |
|
422 | * Ordering is stdlib followed by local imports. Each group is lexically | |
409 | sorted. |
|
423 | sorted. | |
410 | * Importing multiple modules via "import X, Y" is not allowed: use |
|
424 | * Importing multiple modules via "import X, Y" is not allowed: use | |
411 | separate import statements. |
|
425 | separate import statements. | |
412 | * Importing multiple modules via "from X import ..." is allowed if using |
|
426 | * Importing multiple modules via "from X import ..." is allowed if using | |
413 | parenthesis and one entry per line. |
|
427 | parenthesis and one entry per line. | |
414 | * Only 1 relative import statement per import level ("from .", "from ..") |
|
428 | * Only 1 relative import statement per import level ("from .", "from ..") | |
415 | is allowed. |
|
429 | is allowed. | |
416 | * Relative imports from higher levels must occur before lower levels. e.g. |
|
430 | * Relative imports from higher levels must occur before lower levels. e.g. | |
417 | "from .." must be before "from .". |
|
431 | "from .." must be before "from .". | |
418 | * Imports from peer packages should use relative import (e.g. do not |
|
432 | * Imports from peer packages should use relative import (e.g. do not | |
419 | "import mercurial.foo" from a "mercurial.*" module). |
|
433 | "import mercurial.foo" from a "mercurial.*" module). | |
420 | * Symbols can only be imported from specific modules (see |
|
434 | * Symbols can only be imported from specific modules (see | |
421 | `allowsymbolimports`). For other modules, first import the module then |
|
435 | `allowsymbolimports`). For other modules, first import the module then | |
422 | assign the symbol to a module-level variable. In addition, these imports |
|
436 | assign the symbol to a module-level variable. In addition, these imports | |
423 | must be performed before other local imports. This rule only |
|
437 | must be performed before other local imports. This rule only | |
424 | applies to import statements outside of any blocks. |
|
438 | applies to import statements outside of any blocks. | |
425 | * Relative imports from the standard library are not allowed, unless that |
|
439 | * Relative imports from the standard library are not allowed, unless that | |
426 | library is also a local module. |
|
440 | library is also a local module. | |
427 | * Certain modules must be aliased to alternate names to avoid aliasing |
|
441 | * Certain modules must be aliased to alternate names to avoid aliasing | |
428 | and readability problems. See `requirealias`. |
|
442 | and readability problems. See `requirealias`. | |
429 | """ |
|
443 | """ | |
430 | if not isinstance(module, str): |
|
444 | if not isinstance(module, str): | |
431 | module = module.decode('ascii') |
|
445 | module = module.decode('ascii') | |
432 | topmodule = module.split('.')[0] |
|
446 | topmodule = module.split('.')[0] | |
433 | fromlocal = fromlocalfunc(module, localmods) |
|
447 | fromlocal = fromlocalfunc(module, localmods) | |
434 |
|
448 | |||
435 | # Whether a local/non-stdlib import has been performed. |
|
449 | # Whether a local/non-stdlib import has been performed. | |
436 | seenlocal = None |
|
450 | seenlocal = None | |
437 | # Whether a local/non-stdlib, non-symbol import has been seen. |
|
451 | # Whether a local/non-stdlib, non-symbol import has been seen. | |
438 | seennonsymbollocal = False |
|
452 | seennonsymbollocal = False | |
439 | # The last name to be imported (for sorting). |
|
453 | # The last name to be imported (for sorting). | |
440 | lastname = None |
|
454 | lastname = None | |
441 | laststdlib = None |
|
455 | laststdlib = None | |
442 | # Relative import levels encountered so far. |
|
456 | # Relative import levels encountered so far. | |
443 | seenlevels = set() |
|
457 | seenlevels = set() | |
444 |
|
458 | |||
445 | for node, newscope in walklocal(root): |
|
459 | for node, newscope in walklocal(root): | |
|
460 | ||||
446 | def msg(fmt, *args): |
|
461 | def msg(fmt, *args): | |
447 | return (fmt % args, node.lineno) |
|
462 | return (fmt % args, node.lineno) | |
|
463 | ||||
448 | if newscope: |
|
464 | if newscope: | |
449 | # Check for local imports in function |
|
465 | # Check for local imports in function | |
450 |
for r in verify_modern_convention( |
|
466 | for r in verify_modern_convention( | |
451 | node.col_offset + 4): |
|
467 | module, node, localmods, node.col_offset + 4 | |
|
468 | ): | |||
452 | yield r |
|
469 | yield r | |
453 | elif isinstance(node, ast.Import): |
|
470 | elif isinstance(node, ast.Import): | |
454 | # Disallow "import foo, bar" and require separate imports |
|
471 | # Disallow "import foo, bar" and require separate imports | |
455 | # for each module. |
|
472 | # for each module. | |
456 | if len(node.names) > 1: |
|
473 | if len(node.names) > 1: | |
457 |
yield msg( |
|
474 | yield msg( | |
458 | ', '.join(n.name for n in node.names)) |
|
475 | 'multiple imported names: %s', | |
|
476 | ', '.join(n.name for n in node.names), | |||
|
477 | ) | |||
459 |
|
478 | |||
460 | name = node.names[0].name |
|
479 | name = node.names[0].name | |
461 | asname = node.names[0].asname |
|
480 | asname = node.names[0].asname | |
462 |
|
481 | |||
463 | stdlib = name in stdlib_modules |
|
482 | stdlib = name in stdlib_modules | |
464 |
|
483 | |||
465 | # Ignore sorting rules on imports inside blocks. |
|
484 | # Ignore sorting rules on imports inside blocks. | |
466 | if node.col_offset == root_col_offset: |
|
485 | if node.col_offset == root_col_offset: | |
467 | if lastname and name < lastname and laststdlib == stdlib: |
|
486 | if lastname and name < lastname and laststdlib == stdlib: | |
468 |
yield msg( |
|
487 | yield msg( | |
469 |
name, lastname |
|
488 | 'imports not lexically sorted: %s < %s', name, lastname | |
|
489 | ) | |||
470 |
|
490 | |||
471 | lastname = name |
|
491 | lastname = name | |
472 | laststdlib = stdlib |
|
492 | laststdlib = stdlib | |
473 |
|
493 | |||
474 | # stdlib imports should be before local imports. |
|
494 | # stdlib imports should be before local imports. | |
475 | if stdlib and seenlocal and node.col_offset == root_col_offset: |
|
495 | if stdlib and seenlocal and node.col_offset == root_col_offset: | |
476 | yield msg('stdlib import "%s" follows local import: %s', |
|
496 | yield msg( | |
477 | name, seenlocal) |
|
497 | 'stdlib import "%s" follows local import: %s', | |
|
498 | name, | |||
|
499 | seenlocal, | |||
|
500 | ) | |||
478 |
|
501 | |||
479 | if not stdlib: |
|
502 | if not stdlib: | |
480 | seenlocal = name |
|
503 | seenlocal = name | |
481 |
|
504 | |||
482 | # Import of sibling modules should use relative imports. |
|
505 | # Import of sibling modules should use relative imports. | |
483 | topname = name.split('.')[0] |
|
506 | topname = name.split('.')[0] | |
484 | if topname == topmodule: |
|
507 | if topname == topmodule: | |
485 | yield msg('import should be relative: %s', name) |
|
508 | yield msg('import should be relative: %s', name) | |
486 |
|
509 | |||
487 | if name in requirealias and asname != requirealias[name]: |
|
510 | if name in requirealias and asname != requirealias[name]: | |
488 | yield msg('%s module must be "as" aliased to %s', |
|
511 | yield msg( | |
489 | name, requirealias[name]) |
|
512 | '%s module must be "as" aliased to %s', | |
|
513 | name, | |||
|
514 | requirealias[name], | |||
|
515 | ) | |||
490 |
|
516 | |||
491 | elif isinstance(node, ast.ImportFrom): |
|
517 | elif isinstance(node, ast.ImportFrom): | |
492 | # Resolve the full imported module name. |
|
518 | # Resolve the full imported module name. | |
493 | if node.level > 0: |
|
519 | if node.level > 0: | |
494 | fullname = '.'.join(module.split('.')[:-node.level]) |
|
520 | fullname = '.'.join(module.split('.')[: -node.level]) | |
495 | if node.module: |
|
521 | if node.module: | |
496 | fullname += '.%s' % node.module |
|
522 | fullname += '.%s' % node.module | |
497 | else: |
|
523 | else: | |
498 | assert node.module |
|
524 | assert node.module | |
499 | fullname = node.module |
|
525 | fullname = node.module | |
500 |
|
526 | |||
501 | topname = fullname.split('.')[0] |
|
527 | topname = fullname.split('.')[0] | |
502 | if topname == topmodule: |
|
528 | if topname == topmodule: | |
503 | yield msg('import should be relative: %s', fullname) |
|
529 | yield msg('import should be relative: %s', fullname) | |
504 |
|
530 | |||
505 | # __future__ is special since it needs to come first and use |
|
531 | # __future__ is special since it needs to come first and use | |
506 | # symbol import. |
|
532 | # symbol import. | |
507 | if fullname != '__future__': |
|
533 | if fullname != '__future__': | |
508 | if not fullname or ( |
|
534 | if not fullname or ( | |
509 | fullname in stdlib_modules |
|
535 | fullname in stdlib_modules | |
510 | and fullname not in localmods |
|
536 | and fullname not in localmods | |
511 |
and fullname + '.__init__' not in localmods |
|
537 | and fullname + '.__init__' not in localmods | |
|
538 | ): | |||
512 | yield msg('relative import of stdlib module') |
|
539 | yield msg('relative import of stdlib module') | |
513 | else: |
|
540 | else: | |
514 | seenlocal = fullname |
|
541 | seenlocal = fullname | |
515 |
|
542 | |||
516 | # Direct symbol import is only allowed from certain modules and |
|
543 | # Direct symbol import is only allowed from certain modules and | |
517 | # must occur before non-symbol imports. |
|
544 | # must occur before non-symbol imports. | |
518 | found = fromlocal(node.module, node.level) |
|
545 | found = fromlocal(node.module, node.level) | |
519 | if found and found[2]: # node.module is a package |
|
546 | if found and found[2]: # node.module is a package | |
520 | prefix = found[0] + '.' |
|
547 | prefix = found[0] + '.' | |
521 |
symbols = ( |
|
548 | symbols = ( | |
522 |
|
|
549 | n.name for n in node.names if not fromlocal(prefix + n.name) | |
|
550 | ) | |||
523 | else: |
|
551 | else: | |
524 | symbols = (n.name for n in node.names) |
|
552 | symbols = (n.name for n in node.names) | |
525 | symbols = [sym for sym in symbols if sym not in directsymbols] |
|
553 | symbols = [sym for sym in symbols if sym not in directsymbols] | |
526 | if node.module and node.col_offset == root_col_offset: |
|
554 | if node.module and node.col_offset == root_col_offset: | |
527 | if symbols and fullname not in allowsymbolimports: |
|
555 | if symbols and fullname not in allowsymbolimports: | |
528 |
yield msg( |
|
556 | yield msg( | |
529 |
|
|
557 | 'direct symbol import %s from %s', | |
|
558 | ', '.join(symbols), | |||
|
559 | fullname, | |||
|
560 | ) | |||
530 |
|
561 | |||
531 | if symbols and seennonsymbollocal: |
|
562 | if symbols and seennonsymbollocal: | |
532 | yield msg('symbol import follows non-symbol import: %s', |
|
563 | yield msg( | |
533 |
fullname |
|
564 | 'symbol import follows non-symbol import: %s', fullname | |
|
565 | ) | |||
534 | if not symbols and fullname not in stdlib_modules: |
|
566 | if not symbols and fullname not in stdlib_modules: | |
535 | seennonsymbollocal = True |
|
567 | seennonsymbollocal = True | |
536 |
|
568 | |||
537 | if not node.module: |
|
569 | if not node.module: | |
538 | assert node.level |
|
570 | assert node.level | |
539 |
|
571 | |||
540 | # Only allow 1 group per level. |
|
572 | # Only allow 1 group per level. | |
541 |
if ( |
|
573 | if ( | |
542 | and node.col_offset == root_col_offset): |
|
574 | node.level in seenlevels | |
543 | yield msg('multiple "from %s import" statements', |
|
575 | and node.col_offset == root_col_offset | |
544 | '.' * node.level) |
|
576 | ): | |
|
577 | yield msg( | |||
|
578 | 'multiple "from %s import" statements', '.' * node.level | |||
|
579 | ) | |||
545 |
|
580 | |||
546 | # Higher-level groups come before lower-level groups. |
|
581 | # Higher-level groups come before lower-level groups. | |
547 | if any(node.level > l for l in seenlevels): |
|
582 | if any(node.level > l for l in seenlevels): | |
548 | yield msg('higher-level import should come first: %s', |
|
583 | yield msg( | |
549 |
fullname |
|
584 | 'higher-level import should come first: %s', fullname | |
|
585 | ) | |||
550 |
|
586 | |||
551 | seenlevels.add(node.level) |
|
587 | seenlevels.add(node.level) | |
552 |
|
588 | |||
553 | # Entries in "from .X import ( ... )" lists must be lexically |
|
589 | # Entries in "from .X import ( ... )" lists must be lexically | |
554 | # sorted. |
|
590 | # sorted. | |
555 | lastentryname = None |
|
591 | lastentryname = None | |
556 |
|
592 | |||
557 | for n in node.names: |
|
593 | for n in node.names: | |
558 | if lastentryname and n.name < lastentryname: |
|
594 | if lastentryname and n.name < lastentryname: | |
559 | yield msg('imports from %s not lexically sorted: %s < %s', |
|
595 | yield msg( | |
560 | fullname, n.name, lastentryname) |
|
596 | 'imports from %s not lexically sorted: %s < %s', | |
|
597 | fullname, | |||
|
598 | n.name, | |||
|
599 | lastentryname, | |||
|
600 | ) | |||
561 |
|
601 | |||
562 | lastentryname = n.name |
|
602 | lastentryname = n.name | |
563 |
|
603 | |||
564 | if n.name in requirealias and n.asname != requirealias[n.name]: |
|
604 | if n.name in requirealias and n.asname != requirealias[n.name]: | |
565 |
yield msg( |
|
605 | yield msg( | |
566 | n.name, fullname, requirealias[n.name]) |
|
606 | '%s from %s must be "as" aliased to %s', | |
|
607 | n.name, | |||
|
608 | fullname, | |||
|
609 | requirealias[n.name], | |||
|
610 | ) | |||
|
611 | ||||
567 |
|
612 | |||
568 | def verify_stdlib_on_own_line(root): |
|
613 | def verify_stdlib_on_own_line(root): | |
569 | """Given some python source, verify that stdlib imports are done |
|
614 | """Given some python source, verify that stdlib imports are done | |
570 | in separate statements from relative local module imports. |
|
615 | in separate statements from relative local module imports. | |
571 |
|
616 | |||
572 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) |
|
617 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) | |
573 | [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] |
|
618 | [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] | |
574 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) |
|
619 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) | |
575 | [] |
|
620 | [] | |
576 | >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar'))) |
|
621 | >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar'))) | |
577 | [] |
|
622 | [] | |
578 | """ |
|
623 | """ | |
579 | for node in ast.walk(root): |
|
624 | for node in ast.walk(root): | |
580 | if isinstance(node, ast.Import): |
|
625 | if isinstance(node, ast.Import): | |
581 | from_stdlib = {False: [], True: []} |
|
626 | from_stdlib = {False: [], True: []} | |
582 | for n in node.names: |
|
627 | for n in node.names: | |
583 | from_stdlib[n.name in stdlib_modules].append(n.name) |
|
628 | from_stdlib[n.name in stdlib_modules].append(n.name) | |
584 | if from_stdlib[True] and from_stdlib[False]: |
|
629 | if from_stdlib[True] and from_stdlib[False]: | |
585 | yield ('mixed imports\n stdlib: %s\n relative: %s' % |
|
630 | yield ( | |
586 | (', '.join(sorted(from_stdlib[True])), |
|
631 | 'mixed imports\n stdlib: %s\n relative: %s' | |
587 | ', '.join(sorted(from_stdlib[False]))), node.lineno) |
|
632 | % ( | |
|
633 | ', '.join(sorted(from_stdlib[True])), | |||
|
634 | ', '.join(sorted(from_stdlib[False])), | |||
|
635 | ), | |||
|
636 | node.lineno, | |||
|
637 | ) | |||
|
638 | ||||
588 |
|
639 | |||
589 | class CircularImport(Exception): |
|
640 | class CircularImport(Exception): | |
590 | pass |
|
641 | pass | |
591 |
|
642 | |||
|
643 | ||||
592 | def checkmod(mod, imports): |
|
644 | def checkmod(mod, imports): | |
593 | shortest = {} |
|
645 | shortest = {} | |
594 | visit = [[mod]] |
|
646 | visit = [[mod]] | |
595 | while visit: |
|
647 | while visit: | |
596 | path = visit.pop(0) |
|
648 | path = visit.pop(0) | |
597 | for i in sorted(imports.get(path[-1], [])): |
|
649 | for i in sorted(imports.get(path[-1], [])): | |
598 | if len(path) < shortest.get(i, 1000): |
|
650 | if len(path) < shortest.get(i, 1000): | |
599 | shortest[i] = len(path) |
|
651 | shortest[i] = len(path) | |
600 | if i in path: |
|
652 | if i in path: | |
601 | if i == path[0]: |
|
653 | if i == path[0]: | |
602 | raise CircularImport(path) |
|
654 | raise CircularImport(path) | |
603 | continue |
|
655 | continue | |
604 | visit.append(path + [i]) |
|
656 | visit.append(path + [i]) | |
605 |
|
657 | |||
|
658 | ||||
606 | def rotatecycle(cycle): |
|
659 | def rotatecycle(cycle): | |
607 | """arrange a cycle so that the lexicographically first module listed first |
|
660 | """arrange a cycle so that the lexicographically first module listed first | |
608 |
|
661 | |||
609 | >>> rotatecycle(['foo', 'bar']) |
|
662 | >>> rotatecycle(['foo', 'bar']) | |
610 | ['bar', 'foo', 'bar'] |
|
663 | ['bar', 'foo', 'bar'] | |
611 | """ |
|
664 | """ | |
612 | lowest = min(cycle) |
|
665 | lowest = min(cycle) | |
613 | idx = cycle.index(lowest) |
|
666 | idx = cycle.index(lowest) | |
614 | return cycle[idx:] + cycle[:idx] + [lowest] |
|
667 | return cycle[idx:] + cycle[:idx] + [lowest] | |
615 |
|
668 | |||
|
669 | ||||
616 | def find_cycles(imports): |
|
670 | def find_cycles(imports): | |
617 | """Find cycles in an already-loaded import graph. |
|
671 | """Find cycles in an already-loaded import graph. | |
618 |
|
672 | |||
619 | All module names recorded in `imports` should be absolute one. |
|
673 | All module names recorded in `imports` should be absolute one. | |
620 |
|
674 | |||
621 | >>> from __future__ import print_function |
|
675 | >>> from __future__ import print_function | |
622 | >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'], |
|
676 | >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'], | |
623 | ... 'top.bar': ['top.baz', 'sys'], |
|
677 | ... 'top.bar': ['top.baz', 'sys'], | |
624 | ... 'top.baz': ['top.foo'], |
|
678 | ... 'top.baz': ['top.foo'], | |
625 | ... 'top.qux': ['top.foo']} |
|
679 | ... 'top.qux': ['top.foo']} | |
626 | >>> print('\\n'.join(sorted(find_cycles(imports)))) |
|
680 | >>> print('\\n'.join(sorted(find_cycles(imports)))) | |
627 | top.bar -> top.baz -> top.foo -> top.bar |
|
681 | top.bar -> top.baz -> top.foo -> top.bar | |
628 | top.foo -> top.qux -> top.foo |
|
682 | top.foo -> top.qux -> top.foo | |
629 | """ |
|
683 | """ | |
630 | cycles = set() |
|
684 | cycles = set() | |
631 | for mod in sorted(imports.keys()): |
|
685 | for mod in sorted(imports.keys()): | |
632 | try: |
|
686 | try: | |
633 | checkmod(mod, imports) |
|
687 | checkmod(mod, imports) | |
634 | except CircularImport as e: |
|
688 | except CircularImport as e: | |
635 | cycle = e.args[0] |
|
689 | cycle = e.args[0] | |
636 | cycles.add(" -> ".join(rotatecycle(cycle))) |
|
690 | cycles.add(" -> ".join(rotatecycle(cycle))) | |
637 | return cycles |
|
691 | return cycles | |
638 |
|
692 | |||
|
693 | ||||
639 | def _cycle_sortkey(c): |
|
694 | def _cycle_sortkey(c): | |
640 | return len(c), c |
|
695 | return len(c), c | |
641 |
|
696 | |||
|
697 | ||||
642 | def embedded(f, modname, src): |
|
698 | def embedded(f, modname, src): | |
643 | """Extract embedded python code |
|
699 | """Extract embedded python code | |
644 |
|
700 | |||
645 | >>> def _forcestr(thing): |
|
701 | >>> def _forcestr(thing): | |
646 | ... if not isinstance(thing, str): |
|
702 | ... if not isinstance(thing, str): | |
647 | ... return thing.decode('ascii') |
|
703 | ... return thing.decode('ascii') | |
648 | ... return thing |
|
704 | ... return thing | |
649 | >>> def test(fn, lines): |
|
705 | >>> def test(fn, lines): | |
650 | ... for s, m, f, l in embedded(fn, b"example", lines): |
|
706 | ... for s, m, f, l in embedded(fn, b"example", lines): | |
651 | ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l)) |
|
707 | ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l)) | |
652 | ... print(repr(_forcestr(s))) |
|
708 | ... print(repr(_forcestr(s))) | |
653 | >>> lines = [ |
|
709 | >>> lines = [ | |
654 | ... 'comment', |
|
710 | ... 'comment', | |
655 | ... ' >>> from __future__ import print_function', |
|
711 | ... ' >>> from __future__ import print_function', | |
656 | ... " >>> ' multiline", |
|
712 | ... " >>> ' multiline", | |
657 | ... " ... string'", |
|
713 | ... " ... string'", | |
658 | ... ' ', |
|
714 | ... ' ', | |
659 | ... 'comment', |
|
715 | ... 'comment', | |
660 | ... ' $ cat > foo.py <<EOF', |
|
716 | ... ' $ cat > foo.py <<EOF', | |
661 | ... ' > from __future__ import print_function', |
|
717 | ... ' > from __future__ import print_function', | |
662 | ... ' > EOF', |
|
718 | ... ' > EOF', | |
663 | ... ] |
|
719 | ... ] | |
664 | >>> test(b"example.t", lines) |
|
720 | >>> test(b"example.t", lines) | |
665 | example[2] doctest.py 1 |
|
721 | example[2] doctest.py 1 | |
666 | "from __future__ import print_function\\n' multiline\\nstring'\\n\\n" |
|
722 | "from __future__ import print_function\\n' multiline\\nstring'\\n\\n" | |
667 | example[8] foo.py 7 |
|
723 | example[8] foo.py 7 | |
668 | 'from __future__ import print_function\\n' |
|
724 | 'from __future__ import print_function\\n' | |
669 | """ |
|
725 | """ | |
670 | errors = [] |
|
726 | errors = [] | |
671 | for name, starts, ends, code in testparseutil.pyembedded(f, src, errors): |
|
727 | for name, starts, ends, code in testparseutil.pyembedded(f, src, errors): | |
672 | if not name: |
|
728 | if not name: | |
673 | # use 'doctest.py', in order to make already existing |
|
729 | # use 'doctest.py', in order to make already existing | |
674 | # doctest above pass instantly |
|
730 | # doctest above pass instantly | |
675 | name = 'doctest.py' |
|
731 | name = 'doctest.py' | |
676 | # "starts" is "line number" (1-origin), but embedded() is |
|
732 | # "starts" is "line number" (1-origin), but embedded() is | |
677 | # expected to return "line offset" (0-origin). Therefore, this |
|
733 | # expected to return "line offset" (0-origin). Therefore, this | |
678 | # yields "starts - 1". |
|
734 | # yields "starts - 1". | |
679 | if not isinstance(modname, str): |
|
735 | if not isinstance(modname, str): | |
680 | modname = modname.decode('utf8') |
|
736 | modname = modname.decode('utf8') | |
681 | yield code, "%s[%d]" % (modname, starts), name, starts - 1 |
|
737 | yield code, "%s[%d]" % (modname, starts), name, starts - 1 | |
682 |
|
738 | |||
|
739 | ||||
683 | def sources(f, modname): |
|
740 | def sources(f, modname): | |
684 | """Yields possibly multiple sources from a filepath |
|
741 | """Yields possibly multiple sources from a filepath | |
685 |
|
742 | |||
686 | input: filepath, modulename |
|
743 | input: filepath, modulename | |
687 | yields: script(string), modulename, filepath, linenumber |
|
744 | yields: script(string), modulename, filepath, linenumber | |
688 |
|
745 | |||
689 | For embedded scripts, the modulename and filepath will be different |
|
746 | For embedded scripts, the modulename and filepath will be different | |
690 | from the function arguments. linenumber is an offset relative to |
|
747 | from the function arguments. linenumber is an offset relative to | |
691 | the input file. |
|
748 | the input file. | |
692 | """ |
|
749 | """ | |
693 | py = False |
|
750 | py = False | |
694 | if not f.endswith('.t'): |
|
751 | if not f.endswith('.t'): | |
695 | with open(f, 'rb') as src: |
|
752 | with open(f, 'rb') as src: | |
696 | yield src.read(), modname, f, 0 |
|
753 | yield src.read(), modname, f, 0 | |
697 | py = True |
|
754 | py = True | |
698 | if py or f.endswith('.t'): |
|
755 | if py or f.endswith('.t'): | |
699 | with open(f, 'r') as src: |
|
756 | with open(f, 'r') as src: | |
700 | for script, modname, t, line in embedded(f, modname, src): |
|
757 | for script, modname, t, line in embedded(f, modname, src): | |
701 | yield script, modname.encode('utf8'), t, line |
|
758 | yield script, modname.encode('utf8'), t, line | |
702 |
|
759 | |||
|
760 | ||||
703 | def main(argv): |
|
761 | def main(argv): | |
704 | if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): |
|
762 | if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): | |
705 | print('Usage: %s {-|file [file] [file] ...}') |
|
763 | print('Usage: %s {-|file [file] [file] ...}') | |
706 | return 1 |
|
764 | return 1 | |
707 | if argv[1] == '-': |
|
765 | if argv[1] == '-': | |
708 | argv = argv[:1] |
|
766 | argv = argv[:1] | |
709 | argv.extend(l.rstrip() for l in sys.stdin.readlines()) |
|
767 | argv.extend(l.rstrip() for l in sys.stdin.readlines()) | |
710 | localmodpaths = {} |
|
768 | localmodpaths = {} | |
711 | used_imports = {} |
|
769 | used_imports = {} | |
712 | any_errors = False |
|
770 | any_errors = False | |
713 | for source_path in argv[1:]: |
|
771 | for source_path in argv[1:]: | |
714 | modname = dotted_name_of_path(source_path) |
|
772 | modname = dotted_name_of_path(source_path) | |
715 | localmodpaths[modname] = source_path |
|
773 | localmodpaths[modname] = source_path | |
716 | localmods = populateextmods(localmodpaths) |
|
774 | localmods = populateextmods(localmodpaths) | |
717 | for localmodname, source_path in sorted(localmodpaths.items()): |
|
775 | for localmodname, source_path in sorted(localmodpaths.items()): | |
718 | if not isinstance(localmodname, bytes): |
|
776 | if not isinstance(localmodname, bytes): | |
719 | # This is only safe because all hg's files are ascii |
|
777 | # This is only safe because all hg's files are ascii | |
720 | localmodname = localmodname.encode('ascii') |
|
778 | localmodname = localmodname.encode('ascii') | |
721 | for src, modname, name, line in sources(source_path, localmodname): |
|
779 | for src, modname, name, line in sources(source_path, localmodname): | |
722 | try: |
|
780 | try: | |
723 | used_imports[modname] = sorted( |
|
781 | used_imports[modname] = sorted( | |
724 | imported_modules( |
|
782 | imported_modules( | |
725 |
|
783 | src, modname, name, localmods, ignore_nested=True | |
726 | for error, lineno in verify_import_convention(modname, src, |
|
784 | ) | |
727 | localmods): |
|
785 | ) | |
|
786 | for error, lineno in verify_import_convention( | |||
|
787 | modname, src, localmods | |||
|
788 | ): | |||
728 | any_errors = True |
|
789 | any_errors = True | |
729 | print('%s:%d: %s' % (source_path, lineno + line, error)) |
|
790 | print('%s:%d: %s' % (source_path, lineno + line, error)) | |
730 | except SyntaxError as e: |
|
791 | except SyntaxError as e: | |
731 | print('%s:%d: SyntaxError: %s' % |
|
792 | print( | |
732 | (source_path, e.lineno + line, e)) |
|
793 | '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e) | |
|
794 | ) | |||
733 | cycles = find_cycles(used_imports) |
|
795 | cycles = find_cycles(used_imports) | |
734 | if cycles: |
|
796 | if cycles: | |
735 | firstmods = set() |
|
797 | firstmods = set() | |
736 | for c in sorted(cycles, key=_cycle_sortkey): |
|
798 | for c in sorted(cycles, key=_cycle_sortkey): | |
737 | first = c.split()[0] |
|
799 | first = c.split()[0] | |
738 | # As a rough cut, ignore any cycle that starts with the |
|
800 | # As a rough cut, ignore any cycle that starts with the | |
739 | # same module as some other cycle. Otherwise we see lots |
|
801 | # same module as some other cycle. Otherwise we see lots | |
740 | # of cycles that are effectively duplicates. |
|
802 | # of cycles that are effectively duplicates. | |
741 | if first in firstmods: |
|
803 | if first in firstmods: | |
742 | continue |
|
804 | continue | |
743 | print('Import cycle:', c) |
|
805 | print('Import cycle:', c) | |
744 | firstmods.add(first) |
|
806 | firstmods.add(first) | |
745 | any_errors = True |
|
807 | any_errors = True | |
746 | return any_errors != 0 |
|
808 | return any_errors != 0 | |
747 |
|
809 | |||
|
810 | ||||
748 | if __name__ == '__main__': |
|
811 | if __name__ == '__main__': | |
749 | sys.exit(int(main(sys.argv))) |
|
812 | sys.exit(int(main(sys.argv))) |
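The reflowed calls above do not change what the checker does: for every input file, main() derives a module name, pulls each top-level or embedded script out of sources(), and feeds it to imported_modules() and verify_import_convention(). A minimal sketch of that per-file step in isolation (the path is hypothetical; dotted_name_of_path() and sources() are defined earlier in this script):

    # Sketch: inspect one file the way main() does, without the cycle check.
    path = 'mercurial/pushkey.py'
    modname = dotted_name_of_path(path)
    for src, mod, name, line in sources(path, modname):
        # mod/name/line differ from the inputs only for scripts embedded in .t files
        print('%s: %d bytes of source starting at line offset %d'
              % (name, len(src), line))
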
@@ -1,21 +1,22 b'' | |||||
1 | # scmutil.py - Mercurial core utility functions |
|
1 | # scmutil.py - Mercurial core utility functions | |
2 | # |
|
2 | # | |
3 | # Copyright Matt Mackall <mpm@selenic.com> and other |
|
3 | # Copyright Matt Mackall <mpm@selenic.com> and other | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 | from __future__ import absolute_import |
|
7 | from __future__ import absolute_import | |
8 |
|
8 | |||
9 | from . import repoview |
|
9 | from . import repoview | |
10 |
|
10 | |||
|
11 | ||||
11 | def cachetocopy(srcrepo): |
|
12 | def cachetocopy(srcrepo): | |
12 | """return the list of cache file valuable to copy during a clone""" |
|
13 | """return the list of cache file valuable to copy during a clone""" | |
13 | # In local clones we're copying all nodes, not just served |
|
14 | # In local clones we're copying all nodes, not just served | |
14 | # ones. Therefore copy all branch caches over. |
|
15 | # ones. Therefore copy all branch caches over. | |
15 | cachefiles = ['branch2'] |
|
16 | cachefiles = ['branch2'] | |
16 | cachefiles += ['branch2-%s' % f for f in repoview.filtertable] |
|
17 | cachefiles += ['branch2-%s' % f for f in repoview.filtertable] | |
17 | cachefiles += ['rbc-names-v1', 'rbc-revs-v1'] |
|
18 | cachefiles += ['rbc-names-v1', 'rbc-revs-v1'] | |
18 | cachefiles += ['tags2'] |
|
19 | cachefiles += ['tags2'] | |
19 | cachefiles += ['tags2-%s' % f for f in repoview.filtertable] |
|
20 | cachefiles += ['tags2-%s' % f for f in repoview.filtertable] | |
20 | cachefiles += ['hgtagsfnodes1'] |
|
21 | cachefiles += ['hgtagsfnodes1'] | |
21 | return cachefiles |
|
22 | return cachefiles |
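cachetocopy() only assembles file names relative to .hg/cache; the hunk above adds the blank line the new layout expects. A small sketch of the shape of its result (srcrepo stands for any existing localrepository object; the filtered entries depend on repoview.filtertable):

    from mercurial import scmutil

    # Sketch: 'branch2' always comes first, followed by one 'branch2-<filter>'
    # and 'tags2-<filter>' entry per repoview filter, plus the rev-branch-cache
    # and tags-fnodes files.
    names = scmutil.cachetocopy(srcrepo)
    assert names[0] == b'branch2'
    assert b'rbc-names-v1' in names and b'hgtagsfnodes1' in names
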
@@ -1,78 +1,81 b'' | |||||
1 | # diffhelper.py - helper routines for patch |
|
1 | # diffhelper.py - helper routines for patch | |
2 | # |
|
2 | # | |
3 | # Copyright 2009 Matt Mackall <mpm@selenic.com> and others |
|
3 | # Copyright 2009 Matt Mackall <mpm@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 |
|
11 | |||
12 | from . import ( |
|
12 | from . import ( | |
13 | error, |
|
13 | error, | |
14 | pycompat, |
|
14 | pycompat, | |
15 | ) |
|
15 | ) | |
16 |
|
16 | |||
|
17 | ||||
17 | def addlines(fp, hunk, lena, lenb, a, b): |
|
18 | def addlines(fp, hunk, lena, lenb, a, b): | |
18 | """Read lines from fp into the hunk |
|
19 | """Read lines from fp into the hunk | |
19 |
|
20 | |||
20 | The hunk is parsed into two arrays, a and b. a gets the old state of |
|
21 | The hunk is parsed into two arrays, a and b. a gets the old state of | |
21 | the text, b gets the new state. The control char from the hunk is saved |
|
22 | the text, b gets the new state. The control char from the hunk is saved | |
22 | when inserting into a, but not b (for performance while deleting files.) |
|
23 | when inserting into a, but not b (for performance while deleting files.) | |
23 | """ |
|
24 | """ | |
24 | while True: |
|
25 | while True: | |
25 | todoa = lena - len(a) |
|
26 | todoa = lena - len(a) | |
26 | todob = lenb - len(b) |
|
27 | todob = lenb - len(b) | |
27 | num = max(todoa, todob) |
|
28 | num = max(todoa, todob) | |
28 | if num == 0: |
|
29 | if num == 0: | |
29 | break |
|
30 | break | |
30 | for i in pycompat.xrange(num): |
|
31 | for i in pycompat.xrange(num): | |
31 | s = fp.readline() |
|
32 | s = fp.readline() | |
32 | if not s: |
|
33 | if not s: | |
33 | raise error.ParseError(_('incomplete hunk')) |
|
34 | raise error.ParseError(_('incomplete hunk')) | |
34 | if s == "\\ No newline at end of file\n": |
|
35 | if s == "\\ No newline at end of file\n": | |
35 | fixnewline(hunk, a, b) |
|
36 | fixnewline(hunk, a, b) | |
36 | continue |
|
37 | continue | |
37 | if s == '\n' or s == '\r\n': |
|
38 | if s == '\n' or s == '\r\n': | |
38 | # Some patches may be missing the control char |
|
39 | # Some patches may be missing the control char | |
39 | # on empty lines. Supply a leading space. |
|
40 | # on empty lines. Supply a leading space. | |
40 | s = ' ' + s |
|
41 | s = ' ' + s | |
41 | hunk.append(s) |
|
42 | hunk.append(s) | |
42 | if s.startswith('+'): |
|
43 | if s.startswith('+'): | |
43 | b.append(s[1:]) |
|
44 | b.append(s[1:]) | |
44 | elif s.startswith('-'): |
|
45 | elif s.startswith('-'): | |
45 | a.append(s) |
|
46 | a.append(s) | |
46 | else: |
|
47 | else: | |
47 | b.append(s[1:]) |
|
48 | b.append(s[1:]) | |
48 | a.append(s) |
|
49 | a.append(s) | |
49 |
|
50 | |||
|
51 | ||||
50 | def fixnewline(hunk, a, b): |
|
52 | def fixnewline(hunk, a, b): | |
51 | """Fix up the last lines of a and b when the patch has no newline at EOF""" |
|
53 | """Fix up the last lines of a and b when the patch has no newline at EOF""" | |
52 | l = hunk[-1] |
|
54 | l = hunk[-1] | |
53 | # tolerate CRLF in last line |
|
55 | # tolerate CRLF in last line | |
54 | if l.endswith('\r\n'): |
|
56 | if l.endswith('\r\n'): | |
55 | hline = l[:-2] |
|
57 | hline = l[:-2] | |
56 | else: |
|
58 | else: | |
57 | hline = l[:-1] |
|
59 | hline = l[:-1] | |
58 |
|
60 | |||
59 | if hline.startswith((' ', '+')): |
|
61 | if hline.startswith((' ', '+')): | |
60 | b[-1] = hline[1:] |
|
62 | b[-1] = hline[1:] | |
61 | if hline.startswith((' ', '-')): |
|
63 | if hline.startswith((' ', '-')): | |
62 | a[-1] = hline |
|
64 | a[-1] = hline | |
63 | hunk[-1] = hline |
|
65 | hunk[-1] = hline | |
64 |
|
66 | |||
|
67 | ||||
65 | def testhunk(a, b, bstart): |
|
68 | def testhunk(a, b, bstart): | |
66 | """Compare the lines in a with the lines in b |
|
69 | """Compare the lines in a with the lines in b | |
67 |
|
70 | |||
68 | a is assumed to have a control char at the start of each line, this char |
|
71 | a is assumed to have a control char at the start of each line, this char | |
69 | is ignored in the compare. |
|
72 | is ignored in the compare. | |
70 | """ |
|
73 | """ | |
71 | alen = len(a) |
|
74 | alen = len(a) | |
72 | blen = len(b) |
|
75 | blen = len(b) | |
73 | if alen > blen - bstart or bstart < 0: |
|
76 | if alen > blen - bstart or bstart < 0: | |
74 | return False |
|
77 | return False | |
75 | for i in pycompat.xrange(alen): |
|
78 | for i in pycompat.xrange(alen): | |
76 | if a[i][1:] != b[i + bstart]: |
|
79 | if a[i][1:] != b[i + bstart]: | |
77 | return False |
|
80 | return False | |
78 | return True |
|
81 | return True |
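addlines(), fixnewline() and testhunk() together parse one unified-diff hunk into the a (old, control character kept) and b (new) lists described in the docstrings. A hand-built sketch of testhunk(), the piece patching uses to verify a context match (the sample lines are made up):

    from mercurial import diffhelper

    # Sketch: a keeps the leading ' '/'-' control character, b does not;
    # testhunk() strips that character before comparing against b[bstart:].
    a = [b' keep\n', b'-drop\n']
    b = [b'keep\n', b'drop\n', b'tail\n']
    assert diffhelper.testhunk(a, b, 0)       # both lines match at offset 0
    assert not diffhelper.testhunk(a, b, 2)   # too few lines left after offset 2
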
@@ -1,75 +1,84 b'' | |||||
1 | # dirstateguard.py - class to allow restoring dirstate after failure |
|
1 | # dirstateguard.py - class to allow restoring dirstate after failure | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 |
|
11 | |||
12 | from . import ( |
|
12 | from . import ( | |
13 | error, |
|
13 | error, | |
14 | narrowspec, |
|
14 | narrowspec, | |
15 | util, |
|
15 | util, | |
16 | ) |
|
16 | ) | |
17 |
|
17 | |||
|
18 | ||||
18 | class dirstateguard(util.transactional): |
|
19 | class dirstateguard(util.transactional): | |
19 | '''Restore dirstate at unexpected failure. |
|
20 | '''Restore dirstate at unexpected failure. | |
20 |
|
21 | |||
21 | At the construction, this class does: |
|
22 | At the construction, this class does: | |
22 |
|
23 | |||
23 | - write current ``repo.dirstate`` out, and |
|
24 | - write current ``repo.dirstate`` out, and | |
24 | - save ``.hg/dirstate`` into the backup file |
|
25 | - save ``.hg/dirstate`` into the backup file | |
25 |
|
26 | |||
26 | This restores ``.hg/dirstate`` from backup file, if ``release()`` |
|
27 | This restores ``.hg/dirstate`` from backup file, if ``release()`` | |
27 | is invoked before ``close()``. |
|
28 | is invoked before ``close()``. | |
28 |
|
29 | |||
29 | This just removes the backup file at ``close()`` before ``release()``. |
|
30 | This just removes the backup file at ``close()`` before ``release()``. | |
30 | ''' |
|
31 | ''' | |
31 |
|
32 | |||
32 | def __init__(self, repo, name): |
|
33 | def __init__(self, repo, name): | |
33 | self._repo = repo |
|
34 | self._repo = repo | |
34 | self._active = False |
|
35 | self._active = False | |
35 | self._closed = False |
|
36 | self._closed = False | |
36 | self._backupname = 'dirstate.backup.%s.%d' % (name, id(self)) |
|
37 | self._backupname = 'dirstate.backup.%s.%d' % (name, id(self)) | |
37 | self._narrowspecbackupname = ('narrowspec.backup.%s.%d' % |
|
38 | self._narrowspecbackupname = 'narrowspec.backup.%s.%d' % ( | |
38 | (name, id(self))) |
|
39 | name, | |
|
40 | id(self), | |||
|
41 | ) | |||
39 | repo.dirstate.savebackup(repo.currenttransaction(), self._backupname) |
|
42 | repo.dirstate.savebackup(repo.currenttransaction(), self._backupname) | |
40 | narrowspec.savewcbackup(repo, self._narrowspecbackupname) |
|
43 | narrowspec.savewcbackup(repo, self._narrowspecbackupname) | |
41 | self._active = True |
|
44 | self._active = True | |
42 |
|
45 | |||
43 | def __del__(self): |
|
46 | def __del__(self): | |
44 | if self._active: # still active |
|
47 | if self._active: # still active | |
45 | # this may occur, even if this class is used correctly: |
|
48 | # this may occur, even if this class is used correctly: | |
46 | # for example, releasing other resources like transaction |
|
49 | # for example, releasing other resources like transaction | |
47 | # may raise exception before ``dirstateguard.release`` in |
|
50 | # may raise exception before ``dirstateguard.release`` in | |
48 | # ``release(tr, ....)``. |
|
51 | # ``release(tr, ....)``. | |
49 | self._abort() |
|
52 | self._abort() | |
50 |
|
53 | |||
51 | def close(self): |
|
54 | def close(self): | |
52 | if not self._active: # already inactivated |
|
55 | if not self._active: # already inactivated | |
53 | msg = (_("can't close already inactivated backup: %s") |
|
56 | msg = ( | |
54 | % self._backupname) |
|
57 | _("can't close already inactivated backup: %s") | |
|
58 | % self._backupname | |||
|
59 | ) | |||
55 | raise error.Abort(msg) |
|
60 | raise error.Abort(msg) | |
56 |
|
61 | |||
57 | self._repo.dirstate.clearbackup(self._repo.currenttransaction(), |
|
62 | self._repo.dirstate.clearbackup( | |
58 | self._backupname) |
|
63 | self._repo.currenttransaction(), self._backupname | |
|
64 | ) | |||
59 | narrowspec.clearwcbackup(self._repo, self._narrowspecbackupname) |
|
65 | narrowspec.clearwcbackup(self._repo, self._narrowspecbackupname) | |
60 | self._active = False |
|
66 | self._active = False | |
61 | self._closed = True |
|
67 | self._closed = True | |
62 |
|
68 | |||
63 | def _abort(self): |
|
69 | def _abort(self): | |
64 | narrowspec.restorewcbackup(self._repo, self._narrowspecbackupname) |
|
70 | narrowspec.restorewcbackup(self._repo, self._narrowspecbackupname) | |
65 | self._repo.dirstate.restorebackup(self._repo.currenttransaction(), |
|
71 | self._repo.dirstate.restorebackup( | |
66 | self._backupname) |
|
72 | self._repo.currenttransaction(), self._backupname | |
|
73 | ) | |||
67 | self._active = False |
|
74 | self._active = False | |
68 |
|
75 | |||
69 | def release(self): |
|
76 | def release(self): | |
70 | if not self._closed: |
|
77 | if not self._closed: | |
71 | if not self._active: # already inactivated |
|
78 | if not self._active: # already inactivated | |
72 | msg = (_("can't release already inactivated backup: %s") |
|
79 | msg = ( | |
73 | % self._backupname) |
|
80 | _("can't release already inactivated backup: %s") | |
|
81 | % self._backupname | |||
|
82 | ) | |||
74 | raise error.Abort(msg) |
|
83 | raise error.Abort(msg) | |
75 | self._abort() |
|
84 | self._abort() |
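The class implements exactly the protocol its docstring describes: back up the dirstate (and the narrowspec) at construction, discard the backup on close(), and restore it if release() arrives first. A hedged usage sketch (repo is assumed to be an existing localrepository; the helper name is made up):

    from mercurial import dirstateguard

    # Sketch: keep the new dirstate only if the operation succeeded.
    dsguard = dirstateguard.dirstateguard(repo, b'my-operation')
    try:
        update_working_directory(repo)   # hypothetical caller code
        dsguard.close()                  # success: drop the backup
    finally:
        dsguard.release()                # no-op after close(), restores otherwise
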
@@ -1,121 +1,131 b'' | |||||
1 | # httpconnection.py - urllib2 handler for new http support |
|
1 | # httpconnection.py - urllib2 handler for new http support | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> | |
4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> | |
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> | |
6 | # Copyright 2011 Google, Inc. |
|
6 | # Copyright 2011 Google, Inc. | |
7 | # |
|
7 | # | |
8 | # This software may be used and distributed according to the terms of the |
|
8 | # This software may be used and distributed according to the terms of the | |
9 | # GNU General Public License version 2 or any later version. |
|
9 | # GNU General Public License version 2 or any later version. | |
10 |
|
10 | |||
11 | from __future__ import absolute_import |
|
11 | from __future__ import absolute_import | |
12 |
|
12 | |||
13 | import os |
|
13 | import os | |
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from . import ( |
|
16 | from . import ( | |
17 | pycompat, |
|
17 | pycompat, | |
18 | util, |
|
18 | util, | |
19 | ) |
|
19 | ) | |
20 |
|
20 | |||
21 | urlerr = util.urlerr |
|
21 | urlerr = util.urlerr | |
22 | urlreq = util.urlreq |
|
22 | urlreq = util.urlreq | |
23 |
|
23 | |||
24 | # moved here from url.py to avoid a cycle |
|
24 | # moved here from url.py to avoid a cycle | |
25 | class httpsendfile(object): |
|
25 | class httpsendfile(object): | |
26 | """This is a wrapper around the objects returned by python's "open". |
|
26 | """This is a wrapper around the objects returned by python's "open". | |
27 |
|
27 | |||
28 | Its purpose is to send file-like objects via HTTP. |
|
28 | Its purpose is to send file-like objects via HTTP. | |
29 | It do however not define a __len__ attribute because the length |
|
29 | It do however not define a __len__ attribute because the length | |
30 | might be more than Py_ssize_t can handle. |
|
30 | might be more than Py_ssize_t can handle. | |
31 | """ |
|
31 | """ | |
32 |
|
32 | |||
33 | def __init__(self, ui, *args, **kwargs): |
|
33 | def __init__(self, ui, *args, **kwargs): | |
34 | self.ui = ui |
|
34 | self.ui = ui | |
35 | self._data = open(*args, **kwargs) |
|
35 | self._data = open(*args, **kwargs) | |
36 | self.seek = self._data.seek |
|
36 | self.seek = self._data.seek | |
37 | self.close = self._data.close |
|
37 | self.close = self._data.close | |
38 | self.write = self._data.write |
|
38 | self.write = self._data.write | |
39 | self.length = os.fstat(self._data.fileno()).st_size |
|
39 | self.length = os.fstat(self._data.fileno()).st_size | |
40 | self._pos = 0 |
|
40 | self._pos = 0 | |
41 | # We pass double the max for total because we currently have |
|
41 | # We pass double the max for total because we currently have | |
42 | # to send the bundle twice in the case of a server that |
|
42 | # to send the bundle twice in the case of a server that | |
43 | # requires authentication. Since we can't know until we try |
|
43 | # requires authentication. Since we can't know until we try | |
44 | # once whether authentication will be required, just lie to |
|
44 | # once whether authentication will be required, just lie to | |
45 | # the user and maybe the push succeeds suddenly at 50%. |
|
45 | # the user and maybe the push succeeds suddenly at 50%. | |
46 | self._progress = ui.makeprogress( |
|
46 | self._progress = ui.makeprogress( | |
47 |
|
47 | _('sending'), unit=_('kb'), total=(self.length // 1024 * 2) | |
|
48 | ) | |||
48 |
|
49 | |||
49 | def read(self, *args, **kwargs): |
|
50 | def read(self, *args, **kwargs): | |
50 | ret = self._data.read(*args, **kwargs) |
|
51 | ret = self._data.read(*args, **kwargs) | |
51 | if not ret: |
|
52 | if not ret: | |
52 | self._progress.complete() |
|
53 | self._progress.complete() | |
53 | return ret |
|
54 | return ret | |
54 | self._pos += len(ret) |
|
55 | self._pos += len(ret) | |
55 | self._progress.update(self._pos // 1024) |
|
56 | self._progress.update(self._pos // 1024) | |
56 | return ret |
|
57 | return ret | |
57 |
|
58 | |||
58 | def __enter__(self): |
|
59 | def __enter__(self): | |
59 | return self |
|
60 | return self | |
60 |
|
61 | |||
61 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
62 | def __exit__(self, exc_type, exc_val, exc_tb): | |
62 | self.close() |
|
63 | self.close() | |
63 |
|
64 | |||
|
65 | ||||
64 | # moved here from url.py to avoid a cycle |
|
66 | # moved here from url.py to avoid a cycle | |
65 | def readauthforuri(ui, uri, user): |
|
67 | def readauthforuri(ui, uri, user): | |
66 | uri = pycompat.bytesurl(uri) |
|
68 | uri = pycompat.bytesurl(uri) | |
67 | # Read configuration |
|
69 | # Read configuration | |
68 | groups = {} |
|
70 | groups = {} | |
69 | for key, val in ui.configitems('auth'): |
|
71 | for key, val in ui.configitems('auth'): | |
70 | if key in ('cookiefile',): |
|
72 | if key in ('cookiefile',): | |
71 | continue |
|
73 | continue | |
72 |
|
74 | |||
73 | if '.' not in key: |
|
75 | if '.' not in key: | |
74 | ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) |
|
76 | ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) | |
75 | continue |
|
77 | continue | |
76 | group, setting = key.rsplit('.', 1) |
|
78 | group, setting = key.rsplit('.', 1) | |
77 | gdict = groups.setdefault(group, {}) |
|
79 | gdict = groups.setdefault(group, {}) | |
78 | if setting in ('username', 'cert', 'key'): |
|
80 | if setting in ('username', 'cert', 'key'): | |
79 | val = util.expandpath(val) |
|
81 | val = util.expandpath(val) | |
80 | gdict[setting] = val |
|
82 | gdict[setting] = val | |
81 |
|
83 | |||
82 | # Find the best match |
|
84 | # Find the best match | |
83 | scheme, hostpath = uri.split('://', 1) |
|
85 | scheme, hostpath = uri.split('://', 1) | |
84 | bestuser = None |
|
86 | bestuser = None | |
85 | bestlen = 0 |
|
87 | bestlen = 0 | |
86 | bestauth = None |
|
88 | bestauth = None | |
87 | for group, auth in groups.iteritems(): |
|
89 | for group, auth in groups.iteritems(): | |
88 | if user and user != auth.get('username', user): |
|
90 | if user and user != auth.get('username', user): | |
89 | # If a username was set in the URI, the entry username |
|
91 | # If a username was set in the URI, the entry username | |
90 | # must either match it or be unset |
|
92 | # must either match it or be unset | |
91 | continue |
|
93 | continue | |
92 | prefix = auth.get('prefix') |
|
94 | prefix = auth.get('prefix') | |
93 | if not prefix: |
|
95 | if not prefix: | |
94 | continue |
|
96 | continue | |
95 |
|
97 | |||
96 | prefixurl = util.url(prefix) |
|
98 | prefixurl = util.url(prefix) | |
97 | if prefixurl.user and prefixurl.user != user: |
|
99 | if prefixurl.user and prefixurl.user != user: | |
98 | # If a username was set in the prefix, it must match the username in |
|
100 | # If a username was set in the prefix, it must match the username in | |
99 | # the URI. |
|
101 | # the URI. | |
100 | continue |
|
102 | continue | |
101 |
|
103 | |||
102 | # The URI passed in has been stripped of credentials, so erase the user |
|
104 | # The URI passed in has been stripped of credentials, so erase the user | |
103 | # here to allow simpler matching. |
|
105 | # here to allow simpler matching. | |
104 | prefixurl.user = None |
|
106 | prefixurl.user = None | |
105 | prefix = bytes(prefixurl) |
|
107 | prefix = bytes(prefixurl) | |
106 |
|
108 | |||
107 | p = prefix.split('://', 1) |
|
109 | p = prefix.split('://', 1) | |
108 | if len(p) > 1: |
|
110 | if len(p) > 1: | |
109 | schemes, prefix = [p[0]], p[1] |
|
111 | schemes, prefix = [p[0]], p[1] | |
110 | else: |
|
112 | else: | |
111 | schemes = (auth.get('schemes') or 'https').split() |
|
113 | schemes = (auth.get('schemes') or 'https').split() | |
112 | if ((prefix == '*' or hostpath.startswith(prefix)) and |
|
114 | if ( | |
113 | (len(prefix) > bestlen or (len(prefix) == bestlen and |
|
115 | (prefix == '*' or hostpath.startswith(prefix)) | |
114 | not bestuser and 'username' in auth)) |
|
116 | and ( | |
115 | and scheme in schemes): |
|
117 | len(prefix) > bestlen | |
|
118 | or ( | |||
|
119 | len(prefix) == bestlen | |||
|
120 | and not bestuser | |||
|
121 | and 'username' in auth | |||
|
122 | ) | |||
|
123 | ) | |||
|
124 | and scheme in schemes | |||
|
125 | ): | |||
116 | bestlen = len(prefix) |
|
126 | bestlen = len(prefix) | |
117 | bestauth = group, auth |
|
127 | bestauth = group, auth | |
118 | bestuser = auth.get('username') |
|
128 | bestuser = auth.get('username') | |
119 | if user and not bestuser: |
|
129 | if user and not bestuser: | |
120 | auth['username'] = user |
|
130 | auth['username'] = user | |
121 | return bestauth |
|
131 | return bestauth |
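readauthforuri() scans the [auth] groups in the configuration and returns the entry whose prefix is the longest match for the URI's scheme and host/path, preferring entries that also pin a username. A hedged sketch of how a caller sees it (ui is an existing ui object; the group name and values are illustrative, real ones come from the user's hgrc):

    from mercurial import httpconnection

    # Sketch: with an [auth] section such as
    #     [auth]
    #     example.prefix = https://hg.example.com/repos
    #     example.username = alice
    # the lookup below picks the 'example' group for any URI under that prefix.
    res = httpconnection.readauthforuri(
        ui, b'https://hg.example.com/repos/project', None
    )
    if res is not None:
        group, auth = res     # auth is the dict of that group's settings
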
@@ -1,92 +1,100 b'' | |||||
1 | # minifileset.py - a simple language to select files |
|
1 | # minifileset.py - a simple language to select files | |
2 | # |
|
2 | # | |
3 | # Copyright 2017 Facebook, Inc. |
|
3 | # Copyright 2017 Facebook, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 | from . import ( |
|
11 | from . import ( | |
12 | error, |
|
12 | error, | |
13 | fileset, |
|
13 | fileset, | |
14 | filesetlang, |
|
14 | filesetlang, | |
15 | pycompat, |
|
15 | pycompat, | |
16 | ) |
|
16 | ) | |
17 |
|
17 | |||
|
18 | ||||
18 | def _sizep(x): |
|
19 | def _sizep(x): | |
19 | # i18n: "size" is a keyword |
|
20 | # i18n: "size" is a keyword | |
20 | expr = filesetlang.getstring(x, _("size requires an expression")) |
|
21 | expr = filesetlang.getstring(x, _("size requires an expression")) | |
21 | return fileset.sizematcher(expr) |
|
22 | return fileset.sizematcher(expr) | |
22 |
|
23 | |||
|
24 | ||||
23 | def _compile(tree): |
|
25 | def _compile(tree): | |
24 | if not tree: |
|
26 | if not tree: | |
25 | raise error.ParseError(_("missing argument")) |
|
27 | raise error.ParseError(_("missing argument")) | |
26 | op = tree[0] |
|
28 | op = tree[0] | |
27 | if op == 'withstatus': |
|
29 | if op == 'withstatus': | |
28 | return _compile(tree[1]) |
|
30 | return _compile(tree[1]) | |
29 | elif op in {'symbol', 'string', 'kindpat'}: |
|
31 | elif op in {'symbol', 'string', 'kindpat'}: | |
30 | name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern')) |
|
32 | name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern')) | |
31 | if name.startswith('**'): # file extension test, ex. "**.tar.gz" |
|
33 | if name.startswith('**'): # file extension test, ex. "**.tar.gz" | |
32 | ext = name[2:] |
|
34 | ext = name[2:] | |
33 | for c in pycompat.bytestr(ext): |
|
35 | for c in pycompat.bytestr(ext): | |
34 | if c in '*{}[]?/\\': |
|
36 | if c in '*{}[]?/\\': | |
35 | raise error.ParseError(_('reserved character: %s') % c) |
|
37 | raise error.ParseError(_('reserved character: %s') % c) | |
36 | return lambda n, s: n.endswith(ext) |
|
38 | return lambda n, s: n.endswith(ext) | |
37 | elif name.startswith('path:'): # directory or full path test |
|
39 | elif name.startswith('path:'): # directory or full path test | |
38 | p = name[5:] # prefix |
|
40 | p = name[5:] # prefix | |
39 | pl = len(p) |
|
41 | pl = len(p) | |
40 | f = lambda n, s: n.startswith(p) and ( |
|
42 | f = lambda n, s: n.startswith(p) and ( | |
41 |
|
43 | len(n) == pl or n[pl : pl + 1] == '/' | |
|
44 | ) | |||
42 | return f |
|
45 | return f | |
43 | raise error.ParseError(_("unsupported file pattern: %s") % name, |
|
46 | raise error.ParseError( | |
44 | hint=_('paths must be prefixed with "path:"')) |
|
47 | _("unsupported file pattern: %s") % name, | |
|
48 | hint=_('paths must be prefixed with "path:"'), | |||
|
49 | ) | |||
45 | elif op in {'or', 'patterns'}: |
|
50 | elif op in {'or', 'patterns'}: | |
46 | funcs = [_compile(x) for x in tree[1:]] |
|
51 | funcs = [_compile(x) for x in tree[1:]] | |
47 | return lambda n, s: any(f(n, s) for f in funcs) |
|
52 | return lambda n, s: any(f(n, s) for f in funcs) | |
48 | elif op == 'and': |
|
53 | elif op == 'and': | |
49 | func1 = _compile(tree[1]) |
|
54 | func1 = _compile(tree[1]) | |
50 | func2 = _compile(tree[2]) |
|
55 | func2 = _compile(tree[2]) | |
51 | return lambda n, s: func1(n, s) and func2(n, s) |
|
56 | return lambda n, s: func1(n, s) and func2(n, s) | |
52 | elif op == 'not': |
|
57 | elif op == 'not': | |
53 | return lambda n, s: not _compile(tree[1])(n, s) |
|
58 | return lambda n, s: not _compile(tree[1])(n, s) | |
54 | elif op == 'func': |
|
59 | elif op == 'func': | |
55 | symbols = { |
|
60 | symbols = { | |
56 | 'all': lambda n, s: True, |
|
61 | 'all': lambda n, s: True, | |
57 | 'none': lambda n, s: False, |
|
62 | 'none': lambda n, s: False, | |
58 | 'size': lambda n, s: _sizep(tree[2])(s), |
|
63 | 'size': lambda n, s: _sizep(tree[2])(s), | |
59 | } |
|
64 | } | |
60 |
|
65 | |||
61 | name = filesetlang.getsymbol(tree[1]) |
|
66 | name = filesetlang.getsymbol(tree[1]) | |
62 | if name in symbols: |
|
67 | if name in symbols: | |
63 | return symbols[name] |
|
68 | return symbols[name] | |
64 |
|
69 | |||
65 | raise error.UnknownIdentifier(name, symbols.keys()) |
|
70 | raise error.UnknownIdentifier(name, symbols.keys()) | |
66 | elif op == 'minus': |
|
71 | elif op == 'minus': # equivalent to 'x and not y' | |
67 | func1 = _compile(tree[1]) |
|
72 | func1 = _compile(tree[1]) | |
68 | func2 = _compile(tree[2]) |
|
73 | func2 = _compile(tree[2]) | |
69 | return lambda n, s: func1(n, s) and not func2(n, s) |
|
74 | return lambda n, s: func1(n, s) and not func2(n, s) | |
70 | elif op == 'list': |
|
75 | elif op == 'list': | |
71 | raise error.ParseError(_("can't use a list in this context"), |
|
76 | raise error.ParseError( | |
72 | hint=_('see \'hg help "filesets.x or y"\'')) |
|
77 | _("can't use a list in this context"), | |
|
78 | hint=_('see \'hg help "filesets.x or y"\''), | |||
|
79 | ) | |||
73 | raise error.ProgrammingError('illegal tree: %r' % (tree,)) |
|
80 | raise error.ProgrammingError('illegal tree: %r' % (tree,)) | |
74 |
|
81 | |||
|
82 | ||||
75 | def compile(text): |
|
83 | def compile(text): | |
76 | """generate a function (path, size) -> bool from filter specification. |
|
84 | """generate a function (path, size) -> bool from filter specification. | |
77 |
|
85 | |||
78 | "text" could contain the operators defined by the fileset language for |
|
86 | "text" could contain the operators defined by the fileset language for | |
79 | common logic operations, and parenthesis for grouping. The supported path |
|
87 | common logic operations, and parenthesis for grouping. The supported path | |
80 | tests are '**.extname' for file extension test, and '"path:dir/subdir"' |
|
88 | tests are '**.extname' for file extension test, and '"path:dir/subdir"' | |
81 | for prefix test. The ``size()`` predicate is borrowed from filesets to test |
|
89 | for prefix test. The ``size()`` predicate is borrowed from filesets to test | |
82 | file size. The predicates ``all()`` and ``none()`` are also supported. |
|
90 | file size. The predicates ``all()`` and ``none()`` are also supported. | |
83 |
|
91 | |||
84 | '(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for |
|
92 | '(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for | |
85 | example, will catch all php files whose size is greater than 10 MB, all |
|
93 | example, will catch all php files whose size is greater than 10 MB, all | |
86 | files whose name ends with ".zip", and all files under "bin" in the repo |
|
94 | files whose name ends with ".zip", and all files under "bin" in the repo | |
87 | root except for "bin/README". |
|
95 | root except for "bin/README". | |
88 | """ |
|
96 | """ | |
89 | tree = filesetlang.parse(text) |
|
97 | tree = filesetlang.parse(text) | |
90 | tree = filesetlang.analyze(tree) |
|
98 | tree = filesetlang.analyze(tree) | |
91 | tree = filesetlang.optimize(tree) |
|
99 | tree = filesetlang.optimize(tree) | |
92 | return _compile(tree) |
|
100 | return _compile(tree) |
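compile() turns a fileset expression into a plain (path, size) -> bool callable; the docstring above already gives the grammar and an example. A short usage sketch built on that same example expression (the paths and sizes are made up):

    from mercurial import minifileset

    # Sketch: large .php files, any .zip file, and bin/ except bin/README.
    match = minifileset.compile(
        b'(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)'
    )
    match(b'src/index.php', 4 * 1024 * 1024)   # False: .php but under 10MB
    match(b'dist/archive.zip', 123)            # True: extension test
    match(b'bin/run.sh', 10)                   # True: path: prefix test
    match(b'bin/README', 10)                   # False: excluded explicitly
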
@@ -1,47 +1,49 b'' | |||||
1 | # node.py - basic nodeid manipulation for mercurial |
|
1 | # node.py - basic nodeid manipulation for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import binascii |
|
10 | import binascii | |
11 |
|
11 | |||
12 | # This ugly style has a noticeable effect in manifest parsing |
|
12 | # This ugly style has a noticeable effect in manifest parsing | |
13 | hex = binascii.hexlify |
|
13 | hex = binascii.hexlify | |
14 | # Adapt to Python 3 API changes. If this ends up showing up in |
|
14 | # Adapt to Python 3 API changes. If this ends up showing up in | |
15 | # profiles, we can use this version only on Python 3, and forward |
|
15 | # profiles, we can use this version only on Python 3, and forward | |
16 | # binascii.unhexlify like we used to on Python 2. |
|
16 | # binascii.unhexlify like we used to on Python 2. | |
17 | def bin(s): |
|
17 | def bin(s): | |
18 | try: |
|
18 | try: | |
19 | return binascii.unhexlify(s) |
|
19 | return binascii.unhexlify(s) | |
20 | except binascii.Error as e: |
|
20 | except binascii.Error as e: | |
21 | raise TypeError(e) |
|
21 | raise TypeError(e) | |
22 |
|
22 | |||
|
23 | ||||
23 | nullrev = -1 |
|
24 | nullrev = -1 | |
24 | # In hex, this is '0000000000000000000000000000000000000000' |
|
25 | # In hex, this is '0000000000000000000000000000000000000000' | |
25 | nullid = b"\0" * 20 |
|
26 | nullid = b"\0" * 20 | |
26 | nullhex = hex(nullid) |
|
27 | nullhex = hex(nullid) | |
27 |
|
28 | |||
28 | # Phony node value to stand-in for new files in some uses of |
|
29 | # Phony node value to stand-in for new files in some uses of | |
29 | # manifests. |
|
30 | # manifests. | |
30 | # In hex, this is '2121212121212121212121212121212121212121' |
|
31 | # In hex, this is '2121212121212121212121212121212121212121' | |
31 | newnodeid = '!!!!!!!!!!!!!!!!!!!!' |
|
32 | newnodeid = '!!!!!!!!!!!!!!!!!!!!' | |
32 | # In hex, this is '3030303030303030303030303030306164646564' |
|
33 | # In hex, this is '3030303030303030303030303030306164646564' | |
33 | addednodeid = '000000000000000added' |
|
34 | addednodeid = '000000000000000added' | |
34 | # In hex, this is '3030303030303030303030306d6f646966696564' |
|
35 | # In hex, this is '3030303030303030303030306d6f646966696564' | |
35 | modifiednodeid = '000000000000modified' |
|
36 | modifiednodeid = '000000000000modified' | |
36 |
|
37 | |||
37 | wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} |
|
38 | wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} | |
38 |
|
39 | |||
39 | # pseudo identifiers for working directory |
|
40 | # pseudo identifiers for working directory | |
40 | # (they are experimental, so don't add too many dependencies on them) |
|
41 | # (they are experimental, so don't add too many dependencies on them) | |
41 | wdirrev = 0x7fffffff |
|
42 | wdirrev = 0x7FFFFFFF | |
42 | # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' |
|
43 | # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' | |
43 | wdirid = b"\xff" * 20 |
|
44 | wdirid = b"\xff" * 20 | |
44 | wdirhex = hex(wdirid) |
|
45 | wdirhex = hex(wdirid) | |
45 |
|
46 | |||
|
47 | ||||
46 | def short(node): |
|
48 | def short(node): | |
47 | return hex(node[:6]) |
|
49 | return hex(node[:6]) |
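Apart from the sentinels, the module is only thin wrappers: bin() and hex() convert between binary and hex nodeids, and short() truncates to the first 6 bytes before hexlifying. A tiny sketch (the sample nodeid is made up):

    from mercurial.node import bin, hex, nullid, short

    # Sketch: round-trip a 40-character hex nodeid and abbreviate it.
    full = b'1234567890abcdef1234567890abcdef12345678'
    assert hex(bin(full)) == full
    assert short(bin(full)) == b'1234567890ab'   # 6 bytes -> 12 hex digits
    assert hex(nullid) == b'0' * 40
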
@@ -1,146 +1,155 b'' | |||||
1 | # policy.py - module policy logic for Mercurial. |
|
1 | # policy.py - module policy logic for Mercurial. | |
2 | # |
|
2 | # | |
3 | # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import os |
|
10 | import os | |
11 | import sys |
|
11 | import sys | |
12 |
|
12 | |||
13 | # Rules for how modules can be loaded. Values are: |
|
13 | # Rules for how modules can be loaded. Values are: | |
14 | # |
|
14 | # | |
15 | # c - require C extensions |
|
15 | # c - require C extensions | |
16 | # rust+c - require Rust and C extensions |
|
16 | # rust+c - require Rust and C extensions | |
17 | # rust+c-allow - allow Rust and C extensions with fallback to pure Python |
|
17 | # rust+c-allow - allow Rust and C extensions with fallback to pure Python | |
18 | # for each |
|
18 | # for each | |
19 | # allow - allow pure Python implementation when C loading fails |
|
19 | # allow - allow pure Python implementation when C loading fails | |
20 | # cffi - required cffi versions (implemented within pure module) |
|
20 | # cffi - required cffi versions (implemented within pure module) | |
21 | # cffi-allow - allow pure Python implementation if cffi version is missing |
|
21 | # cffi-allow - allow pure Python implementation if cffi version is missing | |
22 | # py - only load pure Python modules |
|
22 | # py - only load pure Python modules | |
23 | # |
|
23 | # | |
24 | # By default, fall back to the pure modules so the in-place build can |
|
24 | # By default, fall back to the pure modules so the in-place build can | |
25 | # run without recompiling the C extensions. This will be overridden by |
|
25 | # run without recompiling the C extensions. This will be overridden by | |
26 | # __modulepolicy__ generated by setup.py. |
|
26 | # __modulepolicy__ generated by setup.py. | |
27 | policy = b'allow' |
|
27 | policy = b'allow' | |
28 | _packageprefs = { |
|
28 | _packageprefs = { | |
29 | # policy: (versioned package, pure package) |
|
29 | # policy: (versioned package, pure package) | |
30 | b'c': (r'cext', None), |
|
30 | b'c': (r'cext', None), | |
31 | b'allow': (r'cext', r'pure'), |
|
31 | b'allow': (r'cext', r'pure'), | |
32 | b'cffi': (r'cffi', None), |
|
32 | b'cffi': (r'cffi', None), | |
33 | b'cffi-allow': (r'cffi', r'pure'), |
|
33 | b'cffi-allow': (r'cffi', r'pure'), | |
34 | b'py': (None, r'pure'), |
|
34 | b'py': (None, r'pure'), | |
35 | # For now, rust policies impact importrust only |
|
35 | # For now, rust policies impact importrust only | |
36 | b'rust+c': (r'cext', None), |
|
36 | b'rust+c': (r'cext', None), | |
37 | b'rust+c-allow': (r'cext', r'pure'), |
|
37 | b'rust+c-allow': (r'cext', r'pure'), | |
38 | } |
|
38 | } | |
39 |
|
39 | |||
40 | try: |
|
40 | try: | |
41 | from . import __modulepolicy__ |
|
41 | from . import __modulepolicy__ | |
|
42 | ||||
42 | policy = __modulepolicy__.modulepolicy |
|
43 | policy = __modulepolicy__.modulepolicy | |
43 | except ImportError: |
|
44 | except ImportError: | |
44 | pass |
|
45 | pass | |
45 |
|
46 | |||
46 | # PyPy doesn't load C extensions. |
|
47 | # PyPy doesn't load C extensions. | |
47 | # |
|
48 | # | |
48 | # The canonical way to do this is to test platform.python_implementation(). |
|
49 | # The canonical way to do this is to test platform.python_implementation(). | |
49 | # But we don't import platform and don't bloat for it here. |
|
50 | # But we don't import platform and don't bloat for it here. | |
50 | if r'__pypy__' in sys.builtin_module_names: |
|
51 | if r'__pypy__' in sys.builtin_module_names: | |
51 | policy = b'cffi' |
|
52 | policy = b'cffi' | |
52 |
|
53 | |||
53 | # Environment variable can always force settings. |
|
54 | # Environment variable can always force settings. | |
54 | if sys.version_info[0] >= 3: |
|
55 | if sys.version_info[0] >= 3: | |
55 | if r'HGMODULEPOLICY' in os.environ: |
|
56 | if r'HGMODULEPOLICY' in os.environ: | |
56 | policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8') |
|
57 | policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8') | |
57 | else: |
|
58 | else: | |
58 | policy = os.environ.get(r'HGMODULEPOLICY', policy) |
|
59 | policy = os.environ.get(r'HGMODULEPOLICY', policy) | |
59 |
|
60 | |||
|
61 | ||||
60 | def _importfrom(pkgname, modname): |
|
62 | def _importfrom(pkgname, modname): | |
61 | # from .<pkgname> import <modname> (where . is looked through this module) |
|
63 | # from .<pkgname> import <modname> (where . is looked through this module) | |
62 | fakelocals = {} |
|
64 | fakelocals = {} | |
63 | pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1) |
|
65 | pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1) | |
64 | try: |
|
66 | try: | |
65 | fakelocals[modname] = mod = getattr(pkg, modname) |
|
67 | fakelocals[modname] = mod = getattr(pkg, modname) | |
66 | except AttributeError: |
|
68 | except AttributeError: | |
67 | raise ImportError(r'cannot import name %s' % modname) |
|
69 | raise ImportError(r'cannot import name %s' % modname) | |
68 | # force import; fakelocals[modname] may be replaced with the real module |
|
70 | # force import; fakelocals[modname] may be replaced with the real module | |
69 | getattr(mod, r'__doc__', None) |
|
71 | getattr(mod, r'__doc__', None) | |
70 | return fakelocals[modname] |
|
72 | return fakelocals[modname] | |
71 |
|
73 | |||
|
74 | ||||
72 | # keep in sync with "version" in C modules |
|
75 | # keep in sync with "version" in C modules | |
73 | _cextversions = { |
|
76 | _cextversions = { | |
74 | (r'cext', r'base85'): 1, |
|
77 | (r'cext', r'base85'): 1, | |
75 | (r'cext', r'bdiff'): 3, |
|
78 | (r'cext', r'bdiff'): 3, | |
76 | (r'cext', r'mpatch'): 1, |
|
79 | (r'cext', r'mpatch'): 1, | |
77 | (r'cext', r'osutil'): 4, |
|
80 | (r'cext', r'osutil'): 4, | |
78 | (r'cext', r'parsers'): 13, |
|
81 | (r'cext', r'parsers'): 13, | |
79 | } |
|
82 | } | |
80 |
|
83 | |||
81 | # map import request to other package or module |
|
84 | # map import request to other package or module | |
82 | _modredirects = { |
|
85 | _modredirects = { | |
83 | (r'cext', r'charencode'): (r'cext', r'parsers'), |
|
86 | (r'cext', r'charencode'): (r'cext', r'parsers'), | |
84 | (r'cffi', r'base85'): (r'pure', r'base85'), |
|
87 | (r'cffi', r'base85'): (r'pure', r'base85'), | |
85 | (r'cffi', r'charencode'): (r'pure', r'charencode'), |
|
88 | (r'cffi', r'charencode'): (r'pure', r'charencode'), | |
86 | (r'cffi', r'parsers'): (r'pure', r'parsers'), |
|
89 | (r'cffi', r'parsers'): (r'pure', r'parsers'), | |
87 | } |
|
90 | } | |
88 |
|
91 | |||
|
92 | ||||
89 | def _checkmod(pkgname, modname, mod): |
|
93 | def _checkmod(pkgname, modname, mod): | |
90 | expected = _cextversions.get((pkgname, modname)) |
|
94 | expected = _cextversions.get((pkgname, modname)) | |
91 | actual = getattr(mod, r'version', None) |
|
95 | actual = getattr(mod, r'version', None) | |
92 | if actual != expected: |
|
96 | if actual != expected: | |
93 | raise ImportError(r'cannot import module %s.%s ' |
|
97 | raise ImportError( | |
94 | r'(expected version: %d, actual: %r)' |
|
98 | r'cannot import module %s.%s ' | |
95 | % (pkgname, modname, expected, actual)) |
|
99 | r'(expected version: %d, actual: %r)' | |
|
100 | % (pkgname, modname, expected, actual) | |||
|
101 | ) | |||
|
102 | ||||
96 |
|
103 | |||
97 | def importmod(modname): |
|
104 | def importmod(modname): | |
98 | """Import module according to policy and check API version""" |
|
105 | """Import module according to policy and check API version""" | |
99 | try: |
|
106 | try: | |
100 | verpkg, purepkg = _packageprefs[policy] |
|
107 | verpkg, purepkg = _packageprefs[policy] | |
101 | except KeyError: |
|
108 | except KeyError: | |
102 | raise ImportError(r'invalid HGMODULEPOLICY %r' % policy) |
|
109 | raise ImportError(r'invalid HGMODULEPOLICY %r' % policy) | |
103 | assert verpkg or purepkg |
|
110 | assert verpkg or purepkg | |
104 | if verpkg: |
|
111 | if verpkg: | |
105 | pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname)) |
|
112 | pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname)) | |
106 | try: |
|
113 | try: | |
107 | mod = _importfrom(pn, mn) |
|
114 | mod = _importfrom(pn, mn) | |
108 | if pn == verpkg: |
|
115 | if pn == verpkg: | |
109 | _checkmod(pn, mn, mod) |
|
116 | _checkmod(pn, mn, mod) | |
110 | return mod |
|
117 | return mod | |
111 | except ImportError: |
|
118 | except ImportError: | |
112 | if not purepkg: |
|
119 | if not purepkg: | |
113 | raise |
|
120 | raise | |
114 | pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname)) |
|
121 | pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname)) | |
115 | return _importfrom(pn, mn) |
|
122 | return _importfrom(pn, mn) | |
116 |
|
123 | |||
|
124 | ||||
117 | def _isrustpermissive(): |
|
125 | def _isrustpermissive(): | |
118 | """Assuming the policy is a Rust one, tell if it's permissive.""" |
|
126 | """Assuming the policy is a Rust one, tell if it's permissive.""" | |
119 | return policy.endswith(b'-allow') |
|
127 | return policy.endswith(b'-allow') | |
120 |
|
128 | |||
|
129 | ||||
121 | def importrust(modname, member=None, default=None): |
|
130 | def importrust(modname, member=None, default=None): | |
122 | """Import Rust module according to policy and availability. |
|
131 | """Import Rust module according to policy and availability. | |
123 |
|
132 | |||
124 | If policy isn't a Rust one, this returns `default`. |
|
133 | If policy isn't a Rust one, this returns `default`. | |
125 |
|
134 | |||
126 | If either the module or its member is not available, this returns `default` |
|
135 | If either the module or its member is not available, this returns `default` | |
127 | if policy is permissive and raises `ImportError` if not. |
|
136 | if policy is permissive and raises `ImportError` if not. | |
128 | """ |
|
137 | """ | |
129 | if not policy.startswith(b'rust'): |
|
138 | if not policy.startswith(b'rust'): | |
130 | return default |
|
139 | return default | |
131 |
|
140 | |||
132 | try: |
|
141 | try: | |
133 | mod = _importfrom(r'rustext', modname) |
|
142 | mod = _importfrom(r'rustext', modname) | |
134 | except ImportError: |
|
143 | except ImportError: | |
135 | if _isrustpermissive(): |
|
144 | if _isrustpermissive(): | |
136 | return default |
|
145 | return default | |
137 | raise |
|
146 | raise | |
138 | if member is None: |
|
147 | if member is None: | |
139 | return mod |
|
148 | return mod | |
140 |
|
149 | |||
141 | try: |
|
150 | try: | |
142 | return getattr(mod, member) |
|
151 | return getattr(mod, member) | |
143 | except AttributeError: |
|
152 | except AttributeError: | |
144 | if _isrustpermissive(): |
|
153 | if _isrustpermissive(): | |
145 | return default |
|
154 | return default | |
146 | raise ImportError(r"Cannot import name %s" % member) |
|
155 | raise ImportError(r"Cannot import name %s" % member) |
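importmod() resolves a module name to the C, cffi, or pure implementation selected by the policy (verifying the C module's version attribute), and importrust() does the same for the optional Rust extensions. A hedged sketch of the calling convention ('parsers' is a real cext/pure module; the Rust module name is only illustrative):

    from mercurial import policy

    # Sketch: honour HGMODULEPOLICY / __modulepolicy__ at import time.
    parsers = policy.importmod(r'parsers')
    rustmod = policy.importrust(r'ancestor', default=None)
    if rustmod is None:
        pass   # non-rust policy, or module missing under a permissive policy
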
@@ -1,61 +1,71 b'' | |||||
1 | # pushkey.py - dispatching for pushing and pulling keys |
|
1 | # pushkey.py - dispatching for pushing and pulling keys | |
2 | # |
|
2 | # | |
3 | # Copyright 2010 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2010 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from . import ( |
|
10 | from . import ( | |
11 | bookmarks, |
|
11 | bookmarks, | |
12 | encoding, |
|
12 | encoding, | |
13 | obsolete, |
|
13 | obsolete, | |
14 | phases, |
|
14 | phases, | |
15 | ) |
|
15 | ) | |
16 |
|
16 | |||
|
17 | ||||
17 | def _nslist(repo): |
|
18 | def _nslist(repo): | |
18 | n = {} |
|
19 | n = {} | |
19 | for k in _namespaces: |
|
20 | for k in _namespaces: | |
20 | n[k] = "" |
|
21 | n[k] = "" | |
21 | if not obsolete.isenabled(repo, obsolete.exchangeopt): |
|
22 | if not obsolete.isenabled(repo, obsolete.exchangeopt): | |
22 | n.pop('obsolete') |
|
23 | n.pop('obsolete') | |
23 | return n |
|
24 | return n | |
24 |
|
25 | |||
25 | _namespaces = {"namespaces": (lambda *x: False, _nslist), |
|
26 | ||
26 | "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks), |
|
27 | _namespaces = { | |
27 | "phases": (phases.pushphase, phases.listphases), |
|
28 | "namespaces": (lambda *x: False, _nslist), | |
28 | "obsolete": (obsolete.pushmarker, obsolete.listmarkers), |
|
29 | "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks), | |
29 | } |
|
30 | "phases": (phases.pushphase, phases.listphases), | |
|
31 | "obsolete": (obsolete.pushmarker, obsolete.listmarkers), | |||
|
32 | } | |||
|
33 | ||||
30 |
|
34 | |||
31 | def register(namespace, pushkey, listkeys): |
|
35 | def register(namespace, pushkey, listkeys): | |
32 | _namespaces[namespace] = (pushkey, listkeys) |
|
36 | _namespaces[namespace] = (pushkey, listkeys) | |
33 |
|
37 | |||
|
38 | ||||
34 | def _get(namespace): |
|
39 | def _get(namespace): | |
35 | return _namespaces.get(namespace, (lambda *x: False, lambda *x: {})) |
|
40 | return _namespaces.get(namespace, (lambda *x: False, lambda *x: {})) | |
36 |
|
41 | |||
|
42 | ||||
37 | def push(repo, namespace, key, old, new): |
|
43 | def push(repo, namespace, key, old, new): | |
38 | '''should succeed iff value was old''' |
|
44 | '''should succeed iff value was old''' | |
39 | pk = _get(namespace)[0] |
|
45 | pk = _get(namespace)[0] | |
40 | return pk(repo, key, old, new) |
|
46 | return pk(repo, key, old, new) | |
41 |
|
47 | |||
|
48 | ||||
42 | def list(repo, namespace): |
|
49 | def list(repo, namespace): | |
43 | '''return a dict''' |
|
50 | '''return a dict''' | |
44 | lk = _get(namespace)[1] |
|
51 | lk = _get(namespace)[1] | |
45 | return lk(repo) |
|
52 | return lk(repo) | |
46 |
|
53 | |||
|
54 | ||||
47 | encode = encoding.fromlocal |
|
55 | encode = encoding.fromlocal | |
48 |
|
56 | |||
49 | decode = encoding.tolocal |
|
57 | decode = encoding.tolocal | |
50 |
|
58 | |||
|
59 | ||||
51 | def encodekeys(keys): |
|
60 | def encodekeys(keys): | |
52 | """encode the content of a pushkey namespace for exchange over the wire""" |
|
61 | """encode the content of a pushkey namespace for exchange over the wire""" | |
53 | return '\n'.join(['%s\t%s' % (encode(k), encode(v)) for k, v in keys]) |
|
62 | return '\n'.join(['%s\t%s' % (encode(k), encode(v)) for k, v in keys]) | |
54 |
|
63 | |||
|
64 | ||||
55 | def decodekeys(data): |
|
65 | def decodekeys(data): | |
56 | """decode the content of a pushkey namespace from exchange over the wire""" |
|
66 | """decode the content of a pushkey namespace from exchange over the wire""" | |
57 | result = {} |
|
67 | result = {} | |
58 | for l in data.splitlines(): |
|
68 | for l in data.splitlines(): | |
59 | k, v = l.split('\t') |
|
69 | k, v = l.split('\t') | |
60 | result[decode(k)] = decode(v) |
|
70 | result[decode(k)] = decode(v) | |
61 | return result |
|
71 | return result |
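encodekeys() and decodekeys() are inverses for the wire protocol: one tab-separated key/value pair per line, converted with encoding.fromlocal()/tolocal(). A round-trip sketch (ASCII-only sample values, so the encoding conversion is effectively a no-op):

    from mercurial import pushkey

    # Sketch: encode a bookmark-style namespace and decode it again.
    keys = [(b'@', b'0' * 40), (b'stable', b'f' * 40)]
    blob = pushkey.encodekeys(keys)
    assert pushkey.decodekeys(blob) == dict(keys)
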
@@ -1,99 +1,105 b'' | |||||
1 | # rcutil.py - utilities about config paths, special config sections etc. |
|
1 | # rcutil.py - utilities about config paths, special config sections etc. | |
2 | # |
|
2 | # | |
3 | # Copyright Mercurial Contributors |
|
3 | # Copyright Mercurial Contributors | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import os |
|
10 | import os | |
11 |
|
11 | |||
12 | from . import ( |
|
12 | from . import ( | |
13 | encoding, |
|
13 | encoding, | |
14 | pycompat, |
|
14 | pycompat, | |
15 | util, |
|
15 | util, | |
16 | ) |
|
16 | ) | |
17 |
|
17 | |||
18 | if pycompat.iswindows: |
|
18 | if pycompat.iswindows: | |
19 | from . import scmwindows as scmplatform |
|
19 | from . import scmwindows as scmplatform | |
20 | else: |
|
20 | else: | |
21 | from . import scmposix as scmplatform |
|
21 | from . import scmposix as scmplatform | |
22 |
|
22 | |||
23 | fallbackpager = scmplatform.fallbackpager |
|
23 | fallbackpager = scmplatform.fallbackpager | |
24 | systemrcpath = scmplatform.systemrcpath |
|
24 | systemrcpath = scmplatform.systemrcpath | |
25 | userrcpath = scmplatform.userrcpath |
|
25 | userrcpath = scmplatform.userrcpath | |
26 |
|
26 | |||
|
27 | ||||
27 | def _expandrcpath(path): |
|
28 | def _expandrcpath(path): | |
28 | '''path could be a file or a directory. return a list of file paths''' |
|
29 | '''path could be a file or a directory. return a list of file paths''' | |
29 | p = util.expandpath(path) |
|
30 | p = util.expandpath(path) | |
30 | if os.path.isdir(p): |
|
31 | if os.path.isdir(p): | |
31 | join = os.path.join |
|
32 | join = os.path.join | |
32 | return sorted(join(p, f) for f, k in util.listdir(p) |
|
33 | return sorted( | |
33 |
|
|
34 | join(p, f) for f, k in util.listdir(p) if f.endswith('.rc') | |
|
35 | ) | |||
34 | return [p] |
|
36 | return [p] | |
35 |
|
37 | |||
|
38 | ||||
36 | def envrcitems(env=None): |
|
39 | def envrcitems(env=None): | |
37 | '''Return [(section, name, value, source)] config items. |
|
40 | '''Return [(section, name, value, source)] config items. | |
38 |
|
41 | |||
39 | The config items are extracted from environment variables specified by env, |
|
42 | The config items are extracted from environment variables specified by env, | |
40 | used to override systemrc, but not userrc. |
|
43 | used to override systemrc, but not userrc. | |
41 |
|
44 | |||
42 | If env is not provided, encoding.environ will be used. |
|
45 | If env is not provided, encoding.environ will be used. | |
43 | ''' |
|
46 | ''' | |
44 | if env is None: |
|
47 | if env is None: | |
45 | env = encoding.environ |
|
48 | env = encoding.environ | |
46 | checklist = [ |
|
49 | checklist = [ | |
47 | ('EDITOR', 'ui', 'editor'), |
|
50 | ('EDITOR', 'ui', 'editor'), | |
48 | ('VISUAL', 'ui', 'editor'), |
|
51 | ('VISUAL', 'ui', 'editor'), | |
49 | ('PAGER', 'pager', 'pager'), |
|
52 | ('PAGER', 'pager', 'pager'), | |
50 | ] |
|
53 | ] | |
51 | result = [] |
|
54 | result = [] | |
52 | for envname, section, configname in checklist: |
|
55 | for envname, section, configname in checklist: | |
53 | if envname not in env: |
|
56 | if envname not in env: | |
54 | continue |
|
57 | continue | |
55 | result.append((section, configname, env[envname], '$%s' % envname)) |
|
58 | result.append((section, configname, env[envname], '$%s' % envname)) | |
56 | return result |
|
59 | return result | |
57 |
|
60 | |||
|
61 | ||||
58 | def defaultrcpath(): |
|
62 | def defaultrcpath(): | |
59 | '''return rc paths in default.d''' |
|
63 | '''return rc paths in default.d''' | |
60 | path = [] |
|
64 | path = [] | |
61 | defaultpath = os.path.join(util.datapath, 'default.d') |
|
65 | defaultpath = os.path.join(util.datapath, 'default.d') | |
62 | if os.path.isdir(defaultpath): |
|
66 | if os.path.isdir(defaultpath): | |
63 | path = _expandrcpath(defaultpath) |
|
67 | path = _expandrcpath(defaultpath) | |
64 | return path |
|
68 | return path | |
65 |
|
69 | |||
|
70 | ||||
66 | def rccomponents(): |
|
71 | def rccomponents(): | |
67 | '''return an ordered [(type, obj)] about where to load configs. |
|
72 | '''return an ordered [(type, obj)] about where to load configs. | |
68 |
|
73 | |||
69 | respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is |
|
74 | respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is | |
70 | used. if $HGRCPATH is not set, the platform default will be used. |
|
75 | used. if $HGRCPATH is not set, the platform default will be used. | |
71 |
|
76 | |||
72 | if a directory is provided, *.rc files under it will be used. |
|
77 | if a directory is provided, *.rc files under it will be used. | |
73 |
|
78 | |||
74 | type could be either 'path' or 'items', if type is 'path', obj is a string, |
|
79 | type could be either 'path' or 'items', if type is 'path', obj is a string, | |
75 | and is the config file path. if type is 'items', obj is a list of (section, |
|
80 | and is the config file path. if type is 'items', obj is a list of (section, | |
76 | name, value, source) that should fill the config directly. |
|
81 | name, value, source) that should fill the config directly. | |
77 | ''' |
|
82 | ''' | |
78 | envrc = ('items', envrcitems()) |
|
83 | envrc = ('items', envrcitems()) | |
79 |
|
84 | |||
80 | if 'HGRCPATH' in encoding.environ: |
|
85 | if 'HGRCPATH' in encoding.environ: | |
81 | # assume HGRCPATH is all about user configs so environments can be |
|
86 | # assume HGRCPATH is all about user configs so environments can be | |
82 | # overridden. |
|
87 | # overridden. | |
83 | _rccomponents = [envrc] |
|
88 | _rccomponents = [envrc] | |
84 | for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep): |
|
89 | for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep): | |
85 | if not p: |
|
90 | if not p: | |
86 | continue |
|
91 | continue | |
87 | _rccomponents.extend(('path', p) for p in _expandrcpath(p)) |
|
92 | _rccomponents.extend(('path', p) for p in _expandrcpath(p)) | |
88 | else: |
|
93 | else: | |
89 | normpaths = lambda paths: [('path', os.path.normpath(p)) for p in paths] |
|
94 | normpaths = lambda paths: [('path', os.path.normpath(p)) for p in paths] | |
90 | _rccomponents = normpaths(defaultrcpath() + systemrcpath()) |
|
95 | _rccomponents = normpaths(defaultrcpath() + systemrcpath()) | |
91 | _rccomponents.append(envrc) |
|
96 | _rccomponents.append(envrc) | |
92 | _rccomponents.extend(normpaths(userrcpath())) |
|
97 | _rccomponents.extend(normpaths(userrcpath())) | |
93 | return _rccomponents |
|
98 | return _rccomponents | |
94 |
|
99 | |||
|
100 | ||||
95 | def defaultpagerenv(): |
|
101 | def defaultpagerenv(): | |
96 | '''return a dict of default environment variables and their values, |
|
102 | '''return a dict of default environment variables and their values, | |
97 | intended to be set before starting a pager. |
|
103 | intended to be set before starting a pager. | |
98 | ''' |
|
104 | ''' | |
99 | return {'LESS': 'FRX', 'LV': '-c'} |
|
105 | return {'LESS': 'FRX', 'LV': '-c'} |
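The hunk above ends the rc-loading helpers: envrcitems() maps $EDITOR/$VISUAL/$PAGER onto (section, name, value, source) config items, and rccomponents() returns the ordered mix of ('path', filename) and ('items', list) sources, placing the environment items so they override systemrc but not userrc. A minimal standalone sketch of how such a component list is consumed; the function and callback names below are illustrative, not Mercurial API:

    def applycomponents(components, readpath, setitem):
        # components mirrors the rccomponents() return shape: an ordered
        # list of ('path', filename) or
        # ('items', [(section, name, value, source), ...]) entries
        for kind, obj in components:
            if kind == 'path':
                readpath(obj)  # later files override earlier ones
            elif kind == 'items':
                for section, name, value, source in obj:
                    setitem(section, name, value, source)
            else:
                raise ValueError('unknown rc component type: %r' % (kind,))

    # toy usage: environment-derived items applied after a system file
    seen = []
    applycomponents(
        [('path', '/etc/mercurial/hgrc'),
         ('items', [('ui', 'editor', 'vim', '$EDITOR')])],
        readpath=lambda p: seen.append(('file', p)),
        setitem=lambda *a: seen.append(('item',) + a),
    )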
@@ -1,53 +1,55 b'' | |||||
1 | # rewriteutil.py - utility functions for rewriting changesets |
|
1 | # rewriteutil.py - utility functions for rewriting changesets | |
2 | # |
|
2 | # | |
3 | # Copyright 2017 Octobus <contact@octobus.net> |
|
3 | # Copyright 2017 Octobus <contact@octobus.net> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 |
|
11 | |||
12 | from . import ( |
|
12 | from . import ( | |
13 | error, |
|
13 | error, | |
14 | node, |
|
14 | node, | |
15 | obsolete, |
|
15 | obsolete, | |
16 | revset, |
|
16 | revset, | |
17 | ) |
|
17 | ) | |
18 |
|
18 | |||
|
19 | ||||
19 | def precheck(repo, revs, action='rewrite'): |
|
20 | def precheck(repo, revs, action='rewrite'): | |
20 | """check if revs can be rewritten |
|
21 | """check if revs can be rewritten | |
21 | action is used to control the error message. |
|
22 | action is used to control the error message. | |
22 |
|
23 | |||
23 | Make sure this function is called after taking the lock. |
|
24 | Make sure this function is called after taking the lock. | |
24 | """ |
|
25 | """ | |
25 | if node.nullrev in revs: |
|
26 | if node.nullrev in revs: | |
26 | msg = _("cannot %s null changeset") % (action) |
|
27 | msg = _("cannot %s null changeset") % action | |
27 | hint = _("no changeset checked out") |
|
28 | hint = _("no changeset checked out") | |
28 | raise error.Abort(msg, hint=hint) |
|
29 | raise error.Abort(msg, hint=hint) | |
29 |
|
30 | |||
30 | if len(repo[None].parents()) > 1: |
|
31 | if len(repo[None].parents()) > 1: | |
31 | raise error.Abort(_("cannot %s while merging") % action) |
|
32 | raise error.Abort(_("cannot %s while merging") % action) | |
32 |
|
33 | |||
33 | publicrevs = repo.revs('%ld and public()', revs) |
|
34 | publicrevs = repo.revs('%ld and public()', revs) | |
34 | if publicrevs: |
|
35 | if publicrevs: | |
35 | msg = _("cannot %s public changesets") % (action) |
|
36 | msg = _("cannot %s public changesets") % action | |
36 | hint = _("see 'hg help phases' for details") |
|
37 | hint = _("see 'hg help phases' for details") | |
37 | raise error.Abort(msg, hint=hint) |
|
38 | raise error.Abort(msg, hint=hint) | |
38 |
|
39 | |||
39 | newunstable = disallowednewunstable(repo, revs) |
|
40 | newunstable = disallowednewunstable(repo, revs) | |
40 | if newunstable: |
|
41 | if newunstable: | |
41 | raise error.Abort(_("cannot %s changeset with children") % action) |
|
42 | raise error.Abort(_("cannot %s changeset with children") % action) | |
42 |
|
43 | |||
|
44 | ||||
43 | def disallowednewunstable(repo, revs): |
|
45 | def disallowednewunstable(repo, revs): | |
44 | """Checks whether editing the revs will create new unstable changesets and |
|
46 | """Checks whether editing the revs will create new unstable changesets and | |
45 | are we allowed to create them. |
|
47 | are we allowed to create them. | |
46 |
|
48 | |||
47 | To allow new unstable changesets, set the config: |
|
49 | To allow new unstable changesets, set the config: | |
48 | `experimental.evolution.allowunstable=True` |
|
50 | `experimental.evolution.allowunstable=True` | |
49 | """ |
|
51 | """ | |
50 | allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) |
|
52 | allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) | |
51 | if allowunstable: |
|
53 | if allowunstable: | |
52 | return revset.baseset() |
|
54 | return revset.baseset() | |
53 | return repo.revs("(%ld::) - %ld", revs, revs) |
|
55 | return repo.revs("(%ld::) - %ld", revs, revs) |
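The revset in disallowednewunstable() above, "(%ld::) - %ld", selects the descendants of the revisions being rewritten minus those revisions themselves, i.e. exactly the changesets that would become unstable. A toy pure-Python illustration of that set computation (not Mercurial code, just the idea on a dict-based DAG):

    def descendants(children, revs):
        # everything reachable from revs via child edges, including revs;
        # a stand-in for the revset '%ld::'
        seen = set(revs)
        stack = list(revs)
        while stack:
            r = stack.pop()
            for c in children.get(r, ()):
                if c not in seen:
                    seen.add(c)
                    stack.append(c)
        return seen

    def wouldorphan(children, revs):
        # stand-in for repo.revs("(%ld::) - %ld", revs, revs)
        return descendants(children, revs) - set(revs)

    # 0 -> 1 -> 2: rewriting rev 1 would leave rev 2 unstable
    assert wouldorphan({0: [1], 1: [2]}, {1}) == {2}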
@@ -1,85 +1,96 b'' | |||||
1 | from __future__ import absolute_import |
|
1 | from __future__ import absolute_import | |
2 |
|
2 | |||
3 | import array |
|
3 | import array | |
4 | import errno |
|
4 | import errno | |
5 | import fcntl |
|
5 | import fcntl | |
6 | import os |
|
6 | import os | |
7 | import sys |
|
7 | import sys | |
8 |
|
8 | |||
9 | from . import ( |
|
9 | from . import ( | |
10 | encoding, |
|
10 | encoding, | |
11 | pycompat, |
|
11 | pycompat, | |
12 | util, |
|
12 | util, | |
13 | ) |
|
13 | ) | |
14 |
|
14 | |||
15 | # BSD 'more' escapes ANSI color sequences by default. This can be disabled by |
|
15 | # BSD 'more' escapes ANSI color sequences by default. This can be disabled by | |
16 | # $MORE variable, but there's no compatible option with Linux 'more'. Given |
|
16 | # $MORE variable, but there's no compatible option with Linux 'more'. Given | |
17 | # OS X is widely used and most modern Unix systems would have 'less', setting |
|
17 | # OS X is widely used and most modern Unix systems would have 'less', setting | |
18 | # 'less' as the default seems reasonable. |
|
18 | # 'less' as the default seems reasonable. | |
19 | fallbackpager = 'less' |
|
19 | fallbackpager = 'less' | |
20 |
|
20 | |||
|
21 | ||||
21 | def _rcfiles(path): |
|
22 | def _rcfiles(path): | |
22 | rcs = [os.path.join(path, 'hgrc')] |
|
23 | rcs = [os.path.join(path, 'hgrc')] | |
23 | rcdir = os.path.join(path, 'hgrc.d') |
|
24 | rcdir = os.path.join(path, 'hgrc.d') | |
24 | try: |
|
25 | try: | |
25 | rcs.extend([os.path.join(rcdir, f) |
|
26 | rcs.extend( | |
26 | for f, kind in util.listdir(rcdir) |
|
27 | [ | |
27 | if f.endswith(".rc")]) |
|
28 | os.path.join(rcdir, f) | |
|
29 | for f, kind in util.listdir(rcdir) | |||
|
30 | if f.endswith(".rc") | |||
|
31 | ] | |||
|
32 | ) | |||
28 | except OSError: |
|
33 | except OSError: | |
29 | pass |
|
34 | pass | |
30 | return rcs |
|
35 | return rcs | |
31 |
|
36 | |||
|
37 | ||||
32 | def systemrcpath(): |
|
38 | def systemrcpath(): | |
33 | path = [] |
|
39 | path = [] | |
34 | if pycompat.sysplatform == 'plan9': |
|
40 | if pycompat.sysplatform == 'plan9': | |
35 | root = 'lib/mercurial' |
|
41 | root = 'lib/mercurial' | |
36 | else: |
|
42 | else: | |
37 | root = 'etc/mercurial' |
|
43 | root = 'etc/mercurial' | |
38 | # old mod_python does not set sys.argv |
|
44 | # old mod_python does not set sys.argv | |
39 | if len(getattr(sys, 'argv', [])) > 0: |
|
45 | if len(getattr(sys, 'argv', [])) > 0: | |
40 | p = os.path.dirname(os.path.dirname(pycompat.sysargv[0])) |
|
46 | p = os.path.dirname(os.path.dirname(pycompat.sysargv[0])) | |
41 | if p != '/': |
|
47 | if p != '/': | |
42 | path.extend(_rcfiles(os.path.join(p, root))) |
|
48 | path.extend(_rcfiles(os.path.join(p, root))) | |
43 | path.extend(_rcfiles('/' + root)) |
|
49 | path.extend(_rcfiles('/' + root)) | |
44 | return path |
|
50 | return path | |
45 |
|
51 | |||
|
52 | ||||
46 | def userrcpath(): |
|
53 | def userrcpath(): | |
47 | if pycompat.sysplatform == 'plan9': |
|
54 | if pycompat.sysplatform == 'plan9': | |
48 | return [encoding.environ['home'] + '/lib/hgrc'] |
|
55 | return [encoding.environ['home'] + '/lib/hgrc'] | |
49 | elif pycompat.isdarwin: |
|
56 | elif pycompat.isdarwin: | |
50 | return [os.path.expanduser('~/.hgrc')] |
|
57 | return [os.path.expanduser('~/.hgrc')] | |
51 | else: |
|
58 | else: | |
52 | confighome = encoding.environ.get('XDG_CONFIG_HOME') |
|
59 | confighome = encoding.environ.get('XDG_CONFIG_HOME') | |
53 | if confighome is None or not os.path.isabs(confighome): |
|
60 | if confighome is None or not os.path.isabs(confighome): | |
54 | confighome = os.path.expanduser('~/.config') |
|
61 | confighome = os.path.expanduser('~/.config') | |
55 |
|
62 | |||
56 | return [os.path.expanduser('~/.hgrc'), |
|
63 | return [ | |
57 | os.path.join(confighome, 'hg', 'hgrc')] |
|
64 | os.path.expanduser('~/.hgrc'), | |
|
65 | os.path.join(confighome, 'hg', 'hgrc'), | |||
|
66 | ] | |||
|
67 | ||||
58 |
|
68 | |||
59 | def termsize(ui): |
|
69 | def termsize(ui): | |
60 | try: |
|
70 | try: | |
61 | import termios |
|
71 | import termios | |
|
72 | ||||
62 | TIOCGWINSZ = termios.TIOCGWINSZ # unavailable on IRIX (issue3449) |
|
73 | TIOCGWINSZ = termios.TIOCGWINSZ # unavailable on IRIX (issue3449) | |
63 | except (AttributeError, ImportError): |
|
74 | except (AttributeError, ImportError): | |
64 | return 80, 24 |
|
75 | return 80, 24 | |
65 |
|
76 | |||
66 | for dev in (ui.ferr, ui.fout, ui.fin): |
|
77 | for dev in (ui.ferr, ui.fout, ui.fin): | |
67 | try: |
|
78 | try: | |
68 | try: |
|
79 | try: | |
69 | fd = dev.fileno() |
|
80 | fd = dev.fileno() | |
70 | except AttributeError: |
|
81 | except AttributeError: | |
71 | continue |
|
82 | continue | |
72 | if not os.isatty(fd): |
|
83 | if not os.isatty(fd): | |
73 | continue |
|
84 | continue | |
74 | arri = fcntl.ioctl(fd, TIOCGWINSZ, '\0' * 8) |
|
85 | arri = fcntl.ioctl(fd, TIOCGWINSZ, '\0' * 8) | |
75 | height, width = array.array(r'h', arri)[:2] |
|
86 | height, width = array.array(r'h', arri)[:2] | |
76 | if width > 0 and height > 0: |
|
87 | if width > 0 and height > 0: | |
77 | return width, height |
|
88 | return width, height | |
78 | except ValueError: |
|
89 | except ValueError: | |
79 | pass |
|
90 | pass | |
80 | except IOError as e: |
|
91 | except IOError as e: | |
81 | if e[0] == errno.EINVAL: |
|
92 | if e[0] == errno.EINVAL: | |
82 | pass |
|
93 | pass | |
83 | else: |
|
94 | else: | |
84 | raise |
|
95 | raise | |
85 | return 80, 24 |
|
96 | return 80, 24 |
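In userrcpath() above, the non-darwin branch keeps ~/.hgrc and adds $XDG_CONFIG_HOME/hg/hgrc, falling back to ~/.config whenever the variable is unset or not absolute (the XDG base-directory spec asks for relative values to be ignored). A small self-contained sketch of just that fallback rule; the helper name is made up for illustration:

    import os

    def xdgconfighome(environ=None):
        # unset or relative $XDG_CONFIG_HOME falls back to ~/.config,
        # mirroring the check in userrcpath()
        if environ is None:
            environ = os.environ
        confighome = environ.get('XDG_CONFIG_HOME')
        if confighome is None or not os.path.isabs(confighome):
            confighome = os.path.expanduser('~/.config')
        return confighome

    assert xdgconfighome({'XDG_CONFIG_HOME': '/tmp/cfg'}) == '/tmp/cfg'
    assert xdgconfighome({'XDG_CONFIG_HOME': 'cfg'}).endswith('.config')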
@@ -1,61 +1,65 b'' | |||||
1 | from __future__ import absolute_import |
|
1 | from __future__ import absolute_import | |
2 |
|
2 | |||
3 | import os |
|
3 | import os | |
4 |
|
4 | |||
5 | from . import ( |
|
5 | from . import ( | |
6 | encoding, |
|
6 | encoding, | |
7 | pycompat, |
|
7 | pycompat, | |
8 | util, |
|
8 | util, | |
9 | win32, |
|
9 | win32, | |
10 | ) |
|
10 | ) | |
11 |
|
11 | |||
12 | try: |
|
12 | try: | |
13 | import _winreg as winreg |
|
13 | import _winreg as winreg | |
|
14 | ||||
14 | winreg.CloseKey |
|
15 | winreg.CloseKey | |
15 | except ImportError: |
|
16 | except ImportError: | |
16 | import winreg |
|
17 | import winreg | |
17 |
|
18 | |||
18 | # MS-DOS 'more' is the only pager available by default on Windows. |
|
19 | # MS-DOS 'more' is the only pager available by default on Windows. | |
19 | fallbackpager = 'more' |
|
20 | fallbackpager = 'more' | |
20 |
|
21 | |||
|
22 | ||||
21 | def systemrcpath(): |
|
23 | def systemrcpath(): | |
22 | '''return default os-specific hgrc search path''' |
|
24 | '''return default os-specific hgrc search path''' | |
23 | rcpath = [] |
|
25 | rcpath = [] | |
24 | filename = win32.executablepath() |
|
26 | filename = win32.executablepath() | |
25 | # Use mercurial.ini found in directory with hg.exe |
|
27 | # Use mercurial.ini found in directory with hg.exe | |
26 | progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') |
|
28 | progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') | |
27 | rcpath.append(progrc) |
|
29 | rcpath.append(progrc) | |
28 | # Use hgrc.d found in directory with hg.exe |
|
30 | # Use hgrc.d found in directory with hg.exe | |
29 | progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') |
|
31 | progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') | |
30 | if os.path.isdir(progrcd): |
|
32 | if os.path.isdir(progrcd): | |
31 | for f, kind in util.listdir(progrcd): |
|
33 | for f, kind in util.listdir(progrcd): | |
32 | if f.endswith('.rc'): |
|
34 | if f.endswith('.rc'): | |
33 | rcpath.append(os.path.join(progrcd, f)) |
|
35 | rcpath.append(os.path.join(progrcd, f)) | |
34 | # else look for a system rcpath in the registry |
|
36 | # else look for a system rcpath in the registry | |
35 | value = util.lookupreg('SOFTWARE\\Mercurial', None, |
|
37 | value = util.lookupreg( | |
36 | winreg.HKEY_LOCAL_MACHINE) |
|
38 | 'SOFTWARE\\Mercurial', None, winreg.HKEY_LOCAL_MACHINE | |
|
39 | ) | |||
37 | if not isinstance(value, str) or not value: |
|
40 | if not isinstance(value, str) or not value: | |
38 | return rcpath |
|
41 | return rcpath | |
39 | value = util.localpath(value) |
|
42 | value = util.localpath(value) | |
40 | for p in value.split(pycompat.ospathsep): |
|
43 | for p in value.split(pycompat.ospathsep): | |
41 | if p.lower().endswith('mercurial.ini'): |
|
44 | if p.lower().endswith('mercurial.ini'): | |
42 | rcpath.append(p) |
|
45 | rcpath.append(p) | |
43 | elif os.path.isdir(p): |
|
46 | elif os.path.isdir(p): | |
44 | for f, kind in util.listdir(p): |
|
47 | for f, kind in util.listdir(p): | |
45 | if f.endswith('.rc'): |
|
48 | if f.endswith('.rc'): | |
46 | rcpath.append(os.path.join(p, f)) |
|
49 | rcpath.append(os.path.join(p, f)) | |
47 | return rcpath |
|
50 | return rcpath | |
48 |
|
51 | |||
|
52 | ||||
49 | def userrcpath(): |
|
53 | def userrcpath(): | |
50 | '''return os-specific hgrc search path to the user dir''' |
|
54 | '''return os-specific hgrc search path to the user dir''' | |
51 | home = os.path.expanduser('~') |
|
55 | home = os.path.expanduser('~') | |
52 | path = [os.path.join(home, 'mercurial.ini'), |
|
56 | path = [os.path.join(home, 'mercurial.ini'), os.path.join(home, '.hgrc')] | |
53 | os.path.join(home, '.hgrc')] |
|
|||
54 | userprofile = encoding.environ.get('USERPROFILE') |
|
57 | userprofile = encoding.environ.get('USERPROFILE') | |
55 | if userprofile and userprofile != home: |
|
58 | if userprofile and userprofile != home: | |
56 | path.append(os.path.join(userprofile, 'mercurial.ini')) |
|
59 | path.append(os.path.join(userprofile, 'mercurial.ini')) | |
57 | path.append(os.path.join(userprofile, '.hgrc')) |
|
60 | path.append(os.path.join(userprofile, '.hgrc')) | |
58 | return path |
|
61 | return path | |
59 |
|
62 | |||
|
63 | ||||
60 | def termsize(ui): |
|
64 | def termsize(ui): | |
61 | return win32.termsize() |
|
65 | return win32.termsize() |
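systemrcpath() above asks util.lookupreg() for the default value of the HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial key and then walks the returned path list. For orientation only, a rough stdlib-only sketch of that single lookup (deliberately simplified, with none of util.lookupreg's extra handling, so not a drop-in replacement):

    try:
        import winreg               # Python 3
    except ImportError:
        import _winreg as winreg    # Python 2

    def lookuphklmdefault(subkey):
        # read the default (unnamed) value of a key under HKEY_LOCAL_MACHINE,
        # returning None when the key or value does not exist
        try:
            with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subkey) as key:
                value, _kind = winreg.QueryValueEx(key, None)
                return value
        except OSError:             # WindowsError is a subclass of OSError
            return None

    # e.g. lookuphklmdefault('SOFTWARE\\Mercurial') may yield a path list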
@@ -1,23 +1,24 b'' | |||||
1 | # stack.py - Mercurial functions for stack definition |
|
1 | # stack.py - Mercurial functions for stack definition | |
2 | # |
|
2 | # | |
3 | # Copyright Matt Mackall <mpm@selenic.com> and other |
|
3 | # Copyright Matt Mackall <mpm@selenic.com> and other | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
|
10 | ||||
10 | def getstack(repo, rev=None): |
|
11 | def getstack(repo, rev=None): | |
11 | """return a sorted smartrev of the stack containing either rev if it is |
|
12 | """return a sorted smartrev of the stack containing either rev if it is | |
12 | not None or the current working directory parent. |
|
13 | not None or the current working directory parent. | |
13 |
|
14 | |||
14 | The stack will always contain all drafts changesets which are ancestors to |
|
15 | The stack will always contain all drafts changesets which are ancestors to | |
15 | the revision and are not merges. |
|
16 | the revision and are not merges. | |
16 | """ |
|
17 | """ | |
17 | if rev is None: |
|
18 | if rev is None: | |
18 | rev = '.' |
|
19 | rev = '.' | |
19 |
|
20 | |||
20 | revspec = 'only(%s) and not public() and not ::merge()' |
|
21 | revspec = 'only(%s) and not public() and not ::merge()' | |
21 | revisions = repo.revs(revspec, rev) |
|
22 | revisions = repo.revs(revspec, rev) | |
22 | revisions.sort() |
|
23 | revisions.sort() | |
23 | return revisions |
|
24 | return revisions |
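getstack() above resolves 'only(%s) and not public() and not ::merge()' against rev (or '.') and returns the revisions sorted by revision number. A hedged sketch of how an extension might consume it, assuming the module is importable as mercurial.stack (the stack.py shown here); printstack and its output format are made up for illustration:

    from __future__ import absolute_import

    from mercurial import stack

    def printstack(ui, repo, rev=None):
        # list the draft stack around rev (default: working directory parent)
        for r in stack.getstack(repo, rev):
            ctx = repo[r]
            firstline = ctx.description().splitlines()[0]
            ui.write("%d: %s\n" % (r, firstline))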