Show More
@@ -1,749 +1,812 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | |
|
3 | 3 | from __future__ import absolute_import, print_function |
|
4 | 4 | |
|
5 | 5 | import ast |
|
6 | 6 | import collections |
|
7 | 7 | import os |
|
8 | 8 | import sys |
|
9 | 9 | |
|
10 | 10 | # Import a minimal set of stdlib modules needed for list_stdlib_modules() |
|
11 | 11 | # to work when run from a virtualenv. The modules were chosen empirically |
|
12 | 12 | # so that the return value matches the return value without virtualenv. |
|
13 | if True: # disable lexical sorting checks | |
|
13 | if True: # disable lexical sorting checks | |
|
14 | 14 | try: |
|
15 | 15 | import BaseHTTPServer as basehttpserver |
|
16 | 16 | except ImportError: |
|
17 | 17 | basehttpserver = None |
|
18 | 18 | import zlib |
|
19 | 19 | |
|
20 | 20 | import testparseutil |
|
21 | 21 | |
|
22 | 22 | # Whitelist of modules that symbols can be directly imported from. |
|
23 | 23 | allowsymbolimports = ( |
|
24 | 24 | '__future__', |
|
25 | 25 | 'bzrlib', |
|
26 | 26 | 'hgclient', |
|
27 | 27 | 'mercurial', |
|
28 | 28 | 'mercurial.hgweb.common', |
|
29 | 29 | 'mercurial.hgweb.request', |
|
30 | 30 | 'mercurial.i18n', |
|
31 | 31 | 'mercurial.interfaces', |
|
32 | 32 | 'mercurial.node', |
|
33 | 33 | # for revlog to re-export constant to extensions |
|
34 | 34 | 'mercurial.revlogutils.constants', |
|
35 | 35 | 'mercurial.revlogutils.flagutil', |
|
36 | 36 | # for cffi modules to re-export pure functions |
|
37 | 37 | 'mercurial.pure.base85', |
|
38 | 38 | 'mercurial.pure.bdiff', |
|
39 | 39 | 'mercurial.pure.mpatch', |
|
40 | 40 | 'mercurial.pure.osutil', |
|
41 | 41 | 'mercurial.pure.parsers', |
|
42 | 42 | # third-party imports should be directly imported |
|
43 | 43 | 'mercurial.thirdparty', |
|
44 | 44 | 'mercurial.thirdparty.attr', |
|
45 | 45 | 'mercurial.thirdparty.zope', |
|
46 | 46 | 'mercurial.thirdparty.zope.interface', |
|
47 | 47 | ) |
|
48 | 48 | |
|
49 | 49 | # Whitelist of symbols that can be directly imported. |
|
50 | directsymbols = ( | |
|
51 | 'demandimport', | |
|
52 | ) | |
|
50 | directsymbols = ('demandimport',) | |
|
53 | 51 | |
|
54 | 52 | # Modules that must be aliased because they are commonly confused with |
|
55 | 53 | # common variables and can create aliasing and readability issues. |
|
56 | 54 | requirealias = { |
|
57 | 55 | 'ui': 'uimod', |
|
58 | 56 | } |
|
59 | 57 | |
|
58 | ||
|
60 | 59 | def usingabsolute(root): |
|
61 | 60 | """Whether absolute imports are being used.""" |
|
62 | 61 | if sys.version_info[0] >= 3: |
|
63 | 62 | return True |
|
64 | 63 | |
|
65 | 64 | for node in ast.walk(root): |
|
66 | 65 | if isinstance(node, ast.ImportFrom): |
|
67 | 66 | if node.module == '__future__': |
|
68 | 67 | for n in node.names: |
|
69 | 68 | if n.name == 'absolute_import': |
|
70 | 69 | return True |
|
71 | 70 | |
|
72 | 71 | return False |
|
73 | 72 | |
|
73 | ||
|
74 | 74 | def walklocal(root): |
|
75 | 75 | """Recursively yield all descendant nodes but not in a different scope""" |
|
76 | 76 | todo = collections.deque(ast.iter_child_nodes(root)) |
|
77 | 77 | yield root, False |
|
78 | 78 | while todo: |
|
79 | 79 | node = todo.popleft() |
|
80 | 80 | newscope = isinstance(node, ast.FunctionDef) |
|
81 | 81 | if not newscope: |
|
82 | 82 | todo.extend(ast.iter_child_nodes(node)) |
|
83 | 83 | yield node, newscope |
|
84 | 84 | |
|
85 | ||
|
85 | 86 | def dotted_name_of_path(path): |
|
86 | 87 | """Given a relative path to a source file, return its dotted module name. |
|
87 | 88 | |
|
88 | 89 | >>> dotted_name_of_path('mercurial/error.py') |
|
89 | 90 | 'mercurial.error' |
|
90 | 91 | >>> dotted_name_of_path('zlibmodule.so') |
|
91 | 92 | 'zlib' |
|
92 | 93 | """ |
|
93 | 94 | parts = path.replace(os.sep, '/').split('/') |
|
94 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so | |
|
95 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so | |
|
95 | 96 | if parts[-1].endswith('module'): |
|
96 | 97 | parts[-1] = parts[-1][:-6] |
|
97 | 98 | return '.'.join(parts) |
|
98 | 99 | |
|
100 | ||
|
99 | 101 | def fromlocalfunc(modulename, localmods): |
|
100 | 102 | """Get a function to examine which locally defined module the |
|
101 | 103 | target source imports via a specified name. |
|
102 | 104 | |
|
103 | 105 | `modulename` is an `dotted_name_of_path()`-ed source file path, |
|
104 | 106 | which may have `.__init__` at the end of it, of the target source. |
|
105 | 107 | |
|
106 | 108 | `localmods` is a set of absolute `dotted_name_of_path()`-ed source file |
|
107 | 109 | paths of locally defined (= Mercurial specific) modules. |
|
108 | 110 | |
|
109 | 111 | This function assumes that module names not existing in |
|
110 | 112 | `localmods` are from the Python standard library. |
|
111 | 113 | |
|
112 | 114 | This function returns the function, which takes `name` argument, |
|
113 | 115 | and returns `(absname, dottedpath, hassubmod)` tuple if `name` |
|
114 | 116 | matches against locally defined module. Otherwise, it returns |
|
115 | 117 | False. |
|
116 | 118 | |
|
117 | 119 | It is assumed that `name` doesn't have `.__init__`. |
|
118 | 120 | |
|
119 | 121 | `absname` is an absolute module name of specified `name` |
|
120 | 122 | (e.g. "hgext.convert"). This can be used to compose prefix for sub |
|
121 | 123 | modules or so. |
|
122 | 124 | |
|
123 | 125 | `dottedpath` is a `dotted_name_of_path()`-ed source file path |
|
124 | 126 | (e.g. "hgext.convert.__init__") of `name`. This is used to look |
|
125 | 127 | module up in `localmods` again. |
|
126 | 128 | |
|
127 | 129 | `hassubmod` is whether it may have sub modules under it (for |
|
128 | 130 | convenient, even though this is also equivalent to "absname != |
|
129 | 131 | dottednpath") |
|
130 | 132 | |
|
131 | 133 | >>> localmods = {'foo.__init__', 'foo.foo1', |
|
132 | 134 | ... 'foo.bar.__init__', 'foo.bar.bar1', |
|
133 | 135 | ... 'baz.__init__', 'baz.baz1'} |
|
134 | 136 | >>> fromlocal = fromlocalfunc('foo.xxx', localmods) |
|
135 | 137 | >>> # relative |
|
136 | 138 | >>> fromlocal('foo1') |
|
137 | 139 | ('foo.foo1', 'foo.foo1', False) |
|
138 | 140 | >>> fromlocal('bar') |
|
139 | 141 | ('foo.bar', 'foo.bar.__init__', True) |
|
140 | 142 | >>> fromlocal('bar.bar1') |
|
141 | 143 | ('foo.bar.bar1', 'foo.bar.bar1', False) |
|
142 | 144 | >>> # absolute |
|
143 | 145 | >>> fromlocal('baz') |
|
144 | 146 | ('baz', 'baz.__init__', True) |
|
145 | 147 | >>> fromlocal('baz.baz1') |
|
146 | 148 | ('baz.baz1', 'baz.baz1', False) |
|
147 | 149 | >>> # unknown = maybe standard library |
|
148 | 150 | >>> fromlocal('os') |
|
149 | 151 | False |
|
150 | 152 | >>> fromlocal(None, 1) |
|
151 | 153 | ('foo', 'foo.__init__', True) |
|
152 | 154 | >>> fromlocal('foo1', 1) |
|
153 | 155 | ('foo.foo1', 'foo.foo1', False) |
|
154 | 156 | >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods) |
|
155 | 157 | >>> fromlocal2(None, 2) |
|
156 | 158 | ('foo', 'foo.__init__', True) |
|
157 | 159 | >>> fromlocal2('bar2', 1) |
|
158 | 160 | False |
|
159 | 161 | >>> fromlocal2('bar', 2) |
|
160 | 162 | ('foo.bar', 'foo.bar.__init__', True) |
|
161 | 163 | """ |
|
162 | 164 | if not isinstance(modulename, str): |
|
163 | 165 | modulename = modulename.decode('ascii') |
|
164 | 166 | prefix = '.'.join(modulename.split('.')[:-1]) |
|
165 | 167 | if prefix: |
|
166 | 168 | prefix += '.' |
|
169 | ||
|
167 | 170 | def fromlocal(name, level=0): |
|
168 | 171 | # name is false value when relative imports are used. |
|
169 | 172 | if not name: |
|
170 | 173 | # If relative imports are used, level must not be absolute. |
|
171 | 174 | assert level > 0 |
|
172 | 175 | candidates = ['.'.join(modulename.split('.')[:-level])] |
|
173 | 176 | else: |
|
174 | 177 | if not level: |
|
175 | 178 | # Check relative name first. |
|
176 | 179 | candidates = [prefix + name, name] |
|
177 | 180 | else: |
|
178 | candidates = ['.'.join(modulename.split('.')[:-level]) + | |
|
179 | '.' + name] | |
|
181 | candidates = [ | |
|
182 | '.'.join(modulename.split('.')[:-level]) + '.' + name | |
|
183 | ] | |
|
180 | 184 | |
|
181 | 185 | for n in candidates: |
|
182 | 186 | if n in localmods: |
|
183 | 187 | return (n, n, False) |
|
184 | 188 | dottedpath = n + '.__init__' |
|
185 | 189 | if dottedpath in localmods: |
|
186 | 190 | return (n, dottedpath, True) |
|
187 | 191 | return False |
|
192 | ||
|
188 | 193 | return fromlocal |
|
189 | 194 | |
|
195 | ||
|
190 | 196 | def populateextmods(localmods): |
|
191 | 197 | """Populate C extension modules based on pure modules""" |
|
192 | 198 | newlocalmods = set(localmods) |
|
193 | 199 | for n in localmods: |
|
194 | 200 | if n.startswith('mercurial.pure.'): |
|
195 | m = n[len('mercurial.pure.'):] | |
|
201 | m = n[len('mercurial.pure.') :] | |
|
196 | 202 | newlocalmods.add('mercurial.cext.' + m) |
|
197 | 203 | newlocalmods.add('mercurial.cffi._' + m) |
|
198 | 204 | return newlocalmods |
|
199 | 205 | |
|
206 | ||
|
200 | 207 | def list_stdlib_modules(): |
|
201 | 208 | """List the modules present in the stdlib. |
|
202 | 209 | |
|
203 | 210 | >>> py3 = sys.version_info[0] >= 3 |
|
204 | 211 | >>> mods = set(list_stdlib_modules()) |
|
205 | 212 | >>> 'BaseHTTPServer' in mods or py3 |
|
206 | 213 | True |
|
207 | 214 | |
|
208 | 215 | os.path isn't really a module, so it's missing: |
|
209 | 216 | |
|
210 | 217 | >>> 'os.path' in mods |
|
211 | 218 | False |
|
212 | 219 | |
|
213 | 220 | sys requires special treatment, because it's baked into the |
|
214 | 221 | interpreter, but it should still appear: |
|
215 | 222 | |
|
216 | 223 | >>> 'sys' in mods |
|
217 | 224 | True |
|
218 | 225 | |
|
219 | 226 | >>> 'collections' in mods |
|
220 | 227 | True |
|
221 | 228 | |
|
222 | 229 | >>> 'cStringIO' in mods or py3 |
|
223 | 230 | True |
|
224 | 231 | |
|
225 | 232 | >>> 'cffi' in mods |
|
226 | 233 | True |
|
227 | 234 | """ |
|
228 | 235 | for m in sys.builtin_module_names: |
|
229 | 236 | yield m |
|
230 | 237 | # These modules only exist on windows, but we should always |
|
231 | 238 | # consider them stdlib. |
|
232 | 239 | for m in ['msvcrt', '_winreg']: |
|
233 | 240 | yield m |
|
234 | 241 | yield '__builtin__' |
|
235 | yield 'builtins' # python3 only | |
|
236 | yield 'importlib.abc' # python3 only | |
|
237 | yield 'importlib.machinery' # python3 only | |
|
238 | yield 'importlib.util' # python3 only | |
|
242 | yield 'builtins' # python3 only | |
|
243 | yield 'importlib.abc' # python3 only | |
|
244 | yield 'importlib.machinery' # python3 only | |
|
245 | yield 'importlib.util' # python3 only | |
|
239 | 246 | for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only |
|
240 | 247 | yield m |
|
241 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy | |
|
248 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy | |
|
242 | 249 | yield m |
|
243 | 250 | for m in ['cffi']: |
|
244 | 251 | yield m |
|
245 | 252 | stdlib_prefixes = {sys.prefix, sys.exec_prefix} |
|
246 | 253 | # We need to supplement the list of prefixes for the search to work |
|
247 | 254 | # when run from within a virtualenv. |
|
248 | 255 | for mod in (basehttpserver, zlib): |
|
249 | 256 | if mod is None: |
|
250 | 257 | continue |
|
251 | 258 | try: |
|
252 | 259 | # Not all module objects have a __file__ attribute. |
|
253 | 260 | filename = mod.__file__ |
|
254 | 261 | except AttributeError: |
|
255 | 262 | continue |
|
256 | 263 | dirname = os.path.dirname(filename) |
|
257 | 264 | for prefix in stdlib_prefixes: |
|
258 | 265 | if dirname.startswith(prefix): |
|
259 | 266 | # Then this directory is redundant. |
|
260 | 267 | break |
|
261 | 268 | else: |
|
262 | 269 | stdlib_prefixes.add(dirname) |
|
263 | 270 | sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |
|
264 | 271 | for libpath in sys.path: |
|
265 | 272 | # We want to walk everything in sys.path that starts with something in |
|
266 | 273 | # stdlib_prefixes, but not directories from the hg sources. |
|
267 | if (os.path.abspath(libpath).startswith(sourceroot) | 
|
268 | or not any(libpath.startswith(p) for p in stdlib_prefixes)): | 
|
274 | if os.path.abspath(libpath).startswith(sourceroot) or not any( | |
|
275 | libpath.startswith(p) for p in stdlib_prefixes | |
|
276 | ): | |
|
269 | 277 | continue |
|
270 | 278 | for top, dirs, files in os.walk(libpath): |
|
271 | 279 | for i, d in reversed(list(enumerate(dirs))): |
|
272 | if (not os.path.exists(os.path.join(top, d, '__init__.py')) | |
|
273 | or top == libpath and d in ('hgdemandimport', 'hgext', | |
|
274 | 'mercurial')): | |
|
280 | if ( | |
|
281 | not os.path.exists(os.path.join(top, d, '__init__.py')) | |
|
282 | or top == libpath | |
|
283 | and d in ('hgdemandimport', 'hgext', 'mercurial') | |
|
284 | ): | |
|
275 | 285 | del dirs[i] |
|
276 | 286 | for name in files: |
|
277 | 287 | if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): |
|
278 | 288 | continue |
|
279 | 289 | if name.startswith('__init__.py'): |
|
280 | 290 | full_path = top |
|
281 | 291 | else: |
|
282 | 292 | full_path = os.path.join(top, name) |
|
283 | rel_path = full_path[len(libpath) + 1:] | |
|
293 | rel_path = full_path[len(libpath) + 1 :] | |
|
284 | 294 | mod = dotted_name_of_path(rel_path) |
|
285 | 295 | yield mod |
|
286 | 296 | |
|
297 | ||
|
287 | 298 | stdlib_modules = set(list_stdlib_modules()) |
|
288 | 299 | |
|
300 | ||
|
289 | 301 | def imported_modules(source, modulename, f, localmods, ignore_nested=False): |
|
290 | 302 | """Given the source of a file as a string, yield the names |
|
291 | 303 | imported by that file. |
|
292 | 304 | |
|
293 | 305 | Args: |
|
294 | 306 | source: The python source to examine as a string. |
|
295 | 307 | modulename: of specified python source (may have `__init__`) |
|
296 | 308 | localmods: set of locally defined module names (may have `__init__`) |
|
297 | 309 | ignore_nested: If true, import statements that do not start in |
|
298 | 310 | column zero will be ignored. |
|
299 | 311 | |
|
300 | 312 | Returns: |
|
301 | 313 | A list of absolute module names imported by the given source. |
|
302 | 314 | |
|
303 | 315 | >>> f = 'foo/xxx.py' |
|
304 | 316 | >>> modulename = 'foo.xxx' |
|
305 | 317 | >>> localmods = {'foo.__init__': True, |
|
306 | 318 | ... 'foo.foo1': True, 'foo.foo2': True, |
|
307 | 319 | ... 'foo.bar.__init__': True, 'foo.bar.bar1': True, |
|
308 | 320 | ... 'baz.__init__': True, 'baz.baz1': True } |
|
309 | 321 | >>> # standard library (= not locally defined ones) |
|
310 | 322 | >>> sorted(imported_modules( |
|
311 | 323 | ... 'from stdlib1 import foo, bar; import stdlib2', |
|
312 | 324 | ... modulename, f, localmods)) |
|
313 | 325 | [] |
|
314 | 326 | >>> # relative importing |
|
315 | 327 | >>> sorted(imported_modules( |
|
316 | 328 | ... 'import foo1; from bar import bar1', |
|
317 | 329 | ... modulename, f, localmods)) |
|
318 | 330 | ['foo.bar.bar1', 'foo.foo1'] |
|
319 | 331 | >>> sorted(imported_modules( |
|
320 | 332 | ... 'from bar.bar1 import name1, name2, name3', |
|
321 | 333 | ... modulename, f, localmods)) |
|
322 | 334 | ['foo.bar.bar1'] |
|
323 | 335 | >>> # absolute importing |
|
324 | 336 | >>> sorted(imported_modules( |
|
325 | 337 | ... 'from baz import baz1, name1', |
|
326 | 338 | ... modulename, f, localmods)) |
|
327 | 339 | ['baz.__init__', 'baz.baz1'] |
|
328 | 340 | >>> # mixed importing, even though it shouldn't be recommended |
|
329 | 341 | >>> sorted(imported_modules( |
|
330 | 342 | ... 'import stdlib, foo1, baz', |
|
331 | 343 | ... modulename, f, localmods)) |
|
332 | 344 | ['baz.__init__', 'foo.foo1'] |
|
333 | 345 | >>> # ignore_nested |
|
334 | 346 | >>> sorted(imported_modules( |
|
335 | 347 | ... '''import foo |
|
336 | 348 | ... def wat(): |
|
337 | 349 | ... import bar |
|
338 | 350 | ... ''', modulename, f, localmods)) |
|
339 | 351 | ['foo.__init__', 'foo.bar.__init__'] |
|
340 | 352 | >>> sorted(imported_modules( |
|
341 | 353 | ... '''import foo |
|
342 | 354 | ... def wat(): |
|
343 | 355 | ... import bar |
|
344 | 356 | ... ''', modulename, f, localmods, ignore_nested=True)) |
|
345 | 357 | ['foo.__init__'] |
|
346 | 358 | """ |
|
347 | 359 | fromlocal = fromlocalfunc(modulename, localmods) |
|
348 | 360 | for node in ast.walk(ast.parse(source, f)): |
|
349 | 361 | if ignore_nested and getattr(node, 'col_offset', 0) > 0: |
|
350 | 362 | continue |
|
351 | 363 | if isinstance(node, ast.Import): |
|
352 | 364 | for n in node.names: |
|
353 | 365 | found = fromlocal(n.name) |
|
354 | 366 | if not found: |
|
355 | 367 | # this should import standard library |
|
356 | 368 | continue |
|
357 | 369 | yield found[1] |
|
358 | 370 | elif isinstance(node, ast.ImportFrom): |
|
359 | 371 | found = fromlocal(node.module, node.level) |
|
360 | 372 | if not found: |
|
361 | 373 | # this should import standard library |
|
362 | 374 | continue |
|
363 | 375 | |
|
364 | 376 | absname, dottedpath, hassubmod = found |
|
365 | 377 | if not hassubmod: |
|
366 | 378 | # "dottedpath" is not a package; must be imported |
|
367 | 379 | yield dottedpath |
|
368 | 380 | # examination of "node.names" should be redundant |
|
369 | 381 | # e.g.: from mercurial.node import nullid, nullrev |
|
370 | 382 | continue |
|
371 | 383 | |
|
372 | 384 | modnotfound = False |
|
373 | 385 | prefix = absname + '.' |
|
374 | 386 | for n in node.names: |
|
375 | 387 | found = fromlocal(prefix + n.name) |
|
376 | 388 | if not found: |
|
377 | 389 | # this should be a function or a property of "node.module" |
|
378 | 390 | modnotfound = True |
|
379 | 391 | continue |
|
380 | 392 | yield found[1] |
|
381 | 393 | if modnotfound: |
|
382 | 394 | # "dottedpath" is a package, but imported because of non-module |
|
383 | 395 | # lookup |
|
384 | 396 | yield dottedpath |
|
385 | 397 | |
|
398 | ||
|
386 | 399 | def verify_import_convention(module, source, localmods): |
|
387 | 400 | """Verify imports match our established coding convention. |
|
388 | 401 | |
|
389 | 402 | We have 2 conventions: legacy and modern. The modern convention is in |
|
390 | 403 | effect when using absolute imports. |
|
391 | 404 | |
|
392 | 405 | The legacy convention only looks for mixed imports. The modern convention |
|
393 | 406 | is much more thorough. |
|
394 | 407 | """ |
|
395 | 408 | root = ast.parse(source) |
|
396 | 409 | absolute = usingabsolute(root) |
|
397 | 410 | |
|
398 | 411 | if absolute: |
|
399 | 412 | return verify_modern_convention(module, root, localmods) |
|
400 | 413 | else: |
|
401 | 414 | return verify_stdlib_on_own_line(root) |
|
402 | 415 | |
|
416 | ||
|
403 | 417 | def verify_modern_convention(module, root, localmods, root_col_offset=0): |
|
404 | 418 | """Verify a file conforms to the modern import convention rules. |
|
405 | 419 | |
|
406 | 420 | The rules of the modern convention are: |
|
407 | 421 | |
|
408 | 422 | * Ordering is stdlib followed by local imports. Each group is lexically |
|
409 | 423 | sorted. |
|
410 | 424 | * Importing multiple modules via "import X, Y" is not allowed: use |
|
411 | 425 | separate import statements. |
|
412 | 426 | * Importing multiple modules via "from X import ..." is allowed if using |
|
413 | 427 | parenthesis and one entry per line. |
|
414 | 428 | * Only 1 relative import statement per import level ("from .", "from ..") |
|
415 | 429 | is allowed. |
|
416 | 430 | * Relative imports from higher levels must occur before lower levels. e.g. |
|
417 | 431 | "from .." must be before "from .". |
|
418 | 432 | * Imports from peer packages should use relative import (e.g. do not |
|
419 | 433 | "import mercurial.foo" from a "mercurial.*" module). |
|
420 | 434 | * Symbols can only be imported from specific modules (see |
|
421 | 435 | `allowsymbolimports`). For other modules, first import the module then |
|
422 | 436 | assign the symbol to a module-level variable. In addition, these imports |
|
423 | 437 | must be performed before other local imports. This rule only |
|
424 | 438 | applies to import statements outside of any blocks. |
|
425 | 439 | * Relative imports from the standard library are not allowed, unless that |
|
426 | 440 | library is also a local module. |
|
427 | 441 | * Certain modules must be aliased to alternate names to avoid aliasing |
|
428 | 442 | and readability problems. See `requirealias`. |
|
429 | 443 | """ |
|
430 | 444 | if not isinstance(module, str): |
|
431 | 445 | module = module.decode('ascii') |
|
432 | 446 | topmodule = module.split('.')[0] |
|
433 | 447 | fromlocal = fromlocalfunc(module, localmods) |
|
434 | 448 | |
|
435 | 449 | # Whether a local/non-stdlib import has been performed. |
|
436 | 450 | seenlocal = None |
|
437 | 451 | # Whether a local/non-stdlib, non-symbol import has been seen. |
|
438 | 452 | seennonsymbollocal = False |
|
439 | 453 | # The last name to be imported (for sorting). |
|
440 | 454 | lastname = None |
|
441 | 455 | laststdlib = None |
|
442 | 456 | # Relative import levels encountered so far. |
|
443 | 457 | seenlevels = set() |
|
444 | 458 | |
|
445 | 459 | for node, newscope in walklocal(root): |
|
460 | ||
|
446 | 461 | def msg(fmt, *args): |
|
447 | 462 | return (fmt % args, node.lineno) |
|
463 | ||
|
448 | 464 | if newscope: |
|
449 | 465 | # Check for local imports in function |
|
450 | for r in verify_modern_convention(module, node, localmods, | 
|
451 | node.col_offset + 4): | |
|
466 | for r in verify_modern_convention( | |
|
467 | module, node, localmods, node.col_offset + 4 | |
|
468 | ): | |
|
452 | 469 | yield r |
|
453 | 470 | elif isinstance(node, ast.Import): |
|
454 | 471 | # Disallow "import foo, bar" and require separate imports |
|
455 | 472 | # for each module. |
|
456 | 473 | if len(node.names) > 1: |
|
457 | yield msg('multiple imported names: %s', | 
|
458 | ', '.join(n.name for n in node.names)) | |
|
474 | yield msg( | |
|
475 | 'multiple imported names: %s', | |
|
476 | ', '.join(n.name for n in node.names), | |
|
477 | ) | |
|
459 | 478 | |
|
460 | 479 | name = node.names[0].name |
|
461 | 480 | asname = node.names[0].asname |
|
462 | 481 | |
|
463 | 482 | stdlib = name in stdlib_modules |
|
464 | 483 | |
|
465 | 484 | # Ignore sorting rules on imports inside blocks. |
|
466 | 485 | if node.col_offset == root_col_offset: |
|
467 | 486 | if lastname and name < lastname and laststdlib == stdlib: |
|
468 | yield msg('imports not lexically sorted: %s < %s', | 
|
469 | name, lastname) | 
|
487 | yield msg( | |
|
488 | 'imports not lexically sorted: %s < %s', name, lastname | |
|
489 | ) | |
|
470 | 490 | |
|
471 | 491 | lastname = name |
|
472 | 492 | laststdlib = stdlib |
|
473 | 493 | |
|
474 | 494 | # stdlib imports should be before local imports. |
|
475 | 495 | if stdlib and seenlocal and node.col_offset == root_col_offset: |
|
476 | yield msg('stdlib import "%s" follows local import: %s', | |
|
477 | name, seenlocal) | |
|
496 | yield msg( | |
|
497 | 'stdlib import "%s" follows local import: %s', | |
|
498 | name, | |
|
499 | seenlocal, | |
|
500 | ) | |
|
478 | 501 | |
|
479 | 502 | if not stdlib: |
|
480 | 503 | seenlocal = name |
|
481 | 504 | |
|
482 | 505 | # Import of sibling modules should use relative imports. |
|
483 | 506 | topname = name.split('.')[0] |
|
484 | 507 | if topname == topmodule: |
|
485 | 508 | yield msg('import should be relative: %s', name) |
|
486 | 509 | |
|
487 | 510 | if name in requirealias and asname != requirealias[name]: |
|
488 | yield msg('%s module must be "as" aliased to %s', | |
|
489 | name, requirealias[name]) | |
|
511 | yield msg( | |
|
512 | '%s module must be "as" aliased to %s', | |
|
513 | name, | |
|
514 | requirealias[name], | |
|
515 | ) | |
|
490 | 516 | |
|
491 | 517 | elif isinstance(node, ast.ImportFrom): |
|
492 | 518 | # Resolve the full imported module name. |
|
493 | 519 | if node.level > 0: |
|
494 | fullname = '.'.join(module.split('.')[:-node.level]) | |
|
520 | fullname = '.'.join(module.split('.')[: -node.level]) | |
|
495 | 521 | if node.module: |
|
496 | 522 | fullname += '.%s' % node.module |
|
497 | 523 | else: |
|
498 | 524 | assert node.module |
|
499 | 525 | fullname = node.module |
|
500 | 526 | |
|
501 | 527 | topname = fullname.split('.')[0] |
|
502 | 528 | if topname == topmodule: |
|
503 | 529 | yield msg('import should be relative: %s', fullname) |
|
504 | 530 | |
|
505 | 531 | # __future__ is special since it needs to come first and use |
|
506 | 532 | # symbol import. |
|
507 | 533 | if fullname != '__future__': |
|
508 | 534 | if not fullname or ( |
|
509 | 535 | fullname in stdlib_modules |
|
510 | 536 | and fullname not in localmods |
|
511 | and fullname + '.__init__' not in localmods): | 
|
537 | and fullname + '.__init__' not in localmods | |
|
538 | ): | |
|
512 | 539 | yield msg('relative import of stdlib module') |
|
513 | 540 | else: |
|
514 | 541 | seenlocal = fullname |
|
515 | 542 | |
|
516 | 543 | # Direct symbol import is only allowed from certain modules and |
|
517 | 544 | # must occur before non-symbol imports. |
|
518 | 545 | found = fromlocal(node.module, node.level) |
|
519 | 546 | if found and found[2]: # node.module is a package |
|
520 | 547 | prefix = found[0] + '.' |
|
521 | symbols = (n.name for n in node.names | 
|
522 | if not fromlocal(prefix + n.name)) | 
|
548 | symbols = ( | |
|
549 | n.name for n in node.names if not fromlocal(prefix + n.name) | |
|
550 | ) | |
|
523 | 551 | else: |
|
524 | 552 | symbols = (n.name for n in node.names) |
|
525 | 553 | symbols = [sym for sym in symbols if sym not in directsymbols] |
|
526 | 554 | if node.module and node.col_offset == root_col_offset: |
|
527 | 555 | if symbols and fullname not in allowsymbolimports: |
|
528 | yield msg('direct symbol import %s from %s', | 
|
529 | ', '.join(symbols), fullname) | 
|
556 | yield msg( | |
|
557 | 'direct symbol import %s from %s', | |
|
558 | ', '.join(symbols), | |
|
559 | fullname, | |
|
560 | ) | |
|
530 | 561 | |
|
531 | 562 | if symbols and seennonsymbollocal: |
|
532 | yield msg('symbol import follows non-symbol import: %s', | |
|
533 | fullname) | 
|
563 | yield msg( | |
|
564 | 'symbol import follows non-symbol import: %s', fullname | |
|
565 | ) | |
|
534 | 566 | if not symbols and fullname not in stdlib_modules: |
|
535 | 567 | seennonsymbollocal = True |
|
536 | 568 | |
|
537 | 569 | if not node.module: |
|
538 | 570 | assert node.level |
|
539 | 571 | |
|
540 | 572 | # Only allow 1 group per level. |
|
541 | if (node.level in seenlevels | 
|
542 | and node.col_offset == root_col_offset): | |
|
543 | yield msg('multiple "from %s import" statements', | |
|
544 | '.' * node.level) | |
|
573 | if ( | |
|
574 | node.level in seenlevels | |
|
575 | and node.col_offset == root_col_offset | |
|
576 | ): | |
|
577 | yield msg( | |
|
578 | 'multiple "from %s import" statements', '.' * node.level | |
|
579 | ) | |
|
545 | 580 | |
|
546 | 581 | # Higher-level groups come before lower-level groups. |
|
547 | 582 | if any(node.level > l for l in seenlevels): |
|
548 | yield msg('higher-level import should come first: %s', | |
|
549 | fullname) | 
|
583 | yield msg( | |
|
584 | 'higher-level import should come first: %s', fullname | |
|
585 | ) | |
|
550 | 586 | |
|
551 | 587 | seenlevels.add(node.level) |
|
552 | 588 | |
|
553 | 589 | # Entries in "from .X import ( ... )" lists must be lexically |
|
554 | 590 | # sorted. |
|
555 | 591 | lastentryname = None |
|
556 | 592 | |
|
557 | 593 | for n in node.names: |
|
558 | 594 | if lastentryname and n.name < lastentryname: |
|
559 | yield msg('imports from %s not lexically sorted: %s < %s', | |
|
560 | fullname, n.name, lastentryname) | |
|
595 | yield msg( | |
|
596 | 'imports from %s not lexically sorted: %s < %s', | |
|
597 | fullname, | |
|
598 | n.name, | |
|
599 | lastentryname, | |
|
600 | ) | |
|
561 | 601 | |
|
562 | 602 | lastentryname = n.name |
|
563 | 603 | |
|
564 | 604 | if n.name in requirealias and n.asname != requirealias[n.name]: |
|
565 | yield msg('%s from %s must be "as" aliased to %s', | 
|
566 | n.name, fullname, requirealias[n.name]) | |
|
605 | yield msg( | |
|
606 | '%s from %s must be "as" aliased to %s', | |
|
607 | n.name, | |
|
608 | fullname, | |
|
609 | requirealias[n.name], | |
|
610 | ) | |
|
611 | ||
|
567 | 612 | |
|
568 | 613 | def verify_stdlib_on_own_line(root): |
|
569 | 614 | """Given some python source, verify that stdlib imports are done |
|
570 | 615 | in separate statements from relative local module imports. |
|
571 | 616 | |
|
572 | 617 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) |
|
573 | 618 | [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] |
|
574 | 619 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) |
|
575 | 620 | [] |
|
576 | 621 | >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar'))) |
|
577 | 622 | [] |
|
578 | 623 | """ |
|
579 | 624 | for node in ast.walk(root): |
|
580 | 625 | if isinstance(node, ast.Import): |
|
581 | 626 | from_stdlib = {False: [], True: []} |
|
582 | 627 | for n in node.names: |
|
583 | 628 | from_stdlib[n.name in stdlib_modules].append(n.name) |
|
584 | 629 | if from_stdlib[True] and from_stdlib[False]: |
|
585 | yield ('mixed imports\n stdlib: %s\n relative: %s' % | |
|
586 | (', '.join(sorted(from_stdlib[True])), | |
|
587 | ', '.join(sorted(from_stdlib[False]))), node.lineno) | |
|
630 | yield ( | |
|
631 | 'mixed imports\n stdlib: %s\n relative: %s' | |
|
632 | % ( | |
|
633 | ', '.join(sorted(from_stdlib[True])), | |
|
634 | ', '.join(sorted(from_stdlib[False])), | |
|
635 | ), | |
|
636 | node.lineno, | |
|
637 | ) | |
|
638 | ||
|
588 | 639 | |
|
589 | 640 | class CircularImport(Exception): |
|
590 | 641 | pass |
|
591 | 642 | |
|
643 | ||
|
592 | 644 | def checkmod(mod, imports): |
|
593 | 645 | shortest = {} |
|
594 | 646 | visit = [[mod]] |
|
595 | 647 | while visit: |
|
596 | 648 | path = visit.pop(0) |
|
597 | 649 | for i in sorted(imports.get(path[-1], [])): |
|
598 | 650 | if len(path) < shortest.get(i, 1000): |
|
599 | 651 | shortest[i] = len(path) |
|
600 | 652 | if i in path: |
|
601 | 653 | if i == path[0]: |
|
602 | 654 | raise CircularImport(path) |
|
603 | 655 | continue |
|
604 | 656 | visit.append(path + [i]) |
|
605 | 657 | |
|
658 | ||
|
606 | 659 | def rotatecycle(cycle): |
|
607 | 660 | """arrange a cycle so that the lexicographically first module listed first |
|
608 | 661 | |
|
609 | 662 | >>> rotatecycle(['foo', 'bar']) |
|
610 | 663 | ['bar', 'foo', 'bar'] |
|
611 | 664 | """ |
|
612 | 665 | lowest = min(cycle) |
|
613 | 666 | idx = cycle.index(lowest) |
|
614 | 667 | return cycle[idx:] + cycle[:idx] + [lowest] |
|
615 | 668 | |
|
669 | ||
|
616 | 670 | def find_cycles(imports): |
|
617 | 671 | """Find cycles in an already-loaded import graph. |
|
618 | 672 | |
|
619 | 673 | All module names recorded in `imports` should be absolute one. |
|
620 | 674 | |
|
621 | 675 | >>> from __future__ import print_function |
|
622 | 676 | >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'], |
|
623 | 677 | ... 'top.bar': ['top.baz', 'sys'], |
|
624 | 678 | ... 'top.baz': ['top.foo'], |
|
625 | 679 | ... 'top.qux': ['top.foo']} |
|
626 | 680 | >>> print('\\n'.join(sorted(find_cycles(imports)))) |
|
627 | 681 | top.bar -> top.baz -> top.foo -> top.bar |
|
628 | 682 | top.foo -> top.qux -> top.foo |
|
629 | 683 | """ |
|
630 | 684 | cycles = set() |
|
631 | 685 | for mod in sorted(imports.keys()): |
|
632 | 686 | try: |
|
633 | 687 | checkmod(mod, imports) |
|
634 | 688 | except CircularImport as e: |
|
635 | 689 | cycle = e.args[0] |
|
636 | 690 | cycles.add(" -> ".join(rotatecycle(cycle))) |
|
637 | 691 | return cycles |
|
638 | 692 | |
|
693 | ||
|
639 | 694 | def _cycle_sortkey(c): |
|
640 | 695 | return len(c), c |
|
641 | 696 | |
|
697 | ||
|
642 | 698 | def embedded(f, modname, src): |
|
643 | 699 | """Extract embedded python code |
|
644 | 700 | |
|
645 | 701 | >>> def _forcestr(thing): |
|
646 | 702 | ... if not isinstance(thing, str): |
|
647 | 703 | ... return thing.decode('ascii') |
|
648 | 704 | ... return thing |
|
649 | 705 | >>> def test(fn, lines): |
|
650 | 706 | ... for s, m, f, l in embedded(fn, b"example", lines): |
|
651 | 707 | ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l)) |
|
652 | 708 | ... print(repr(_forcestr(s))) |
|
653 | 709 | >>> lines = [ |
|
654 | 710 | ... 'comment', |
|
655 | 711 | ... ' >>> from __future__ import print_function', |
|
656 | 712 | ... " >>> ' multiline", |
|
657 | 713 | ... " ... string'", |
|
658 | 714 | ... ' ', |
|
659 | 715 | ... 'comment', |
|
660 | 716 | ... ' $ cat > foo.py <<EOF', |
|
661 | 717 | ... ' > from __future__ import print_function', |
|
662 | 718 | ... ' > EOF', |
|
663 | 719 | ... ] |
|
664 | 720 | >>> test(b"example.t", lines) |
|
665 | 721 | example[2] doctest.py 1 |
|
666 | 722 | "from __future__ import print_function\\n' multiline\\nstring'\\n\\n" |
|
667 | 723 | example[8] foo.py 7 |
|
668 | 724 | 'from __future__ import print_function\\n' |
|
669 | 725 | """ |
|
670 | 726 | errors = [] |
|
671 | 727 | for name, starts, ends, code in testparseutil.pyembedded(f, src, errors): |
|
672 | 728 | if not name: |
|
673 | 729 | # use 'doctest.py', in order to make already existing |
|
674 | 730 | # doctest above pass instantly |
|
675 | 731 | name = 'doctest.py' |
|
676 | 732 | # "starts" is "line number" (1-origin), but embedded() is |
|
677 | 733 | # expected to return "line offset" (0-origin). Therefore, this |
|
678 | 734 | # yields "starts - 1". |
|
679 | 735 | if not isinstance(modname, str): |
|
680 | 736 | modname = modname.decode('utf8') |
|
681 | 737 | yield code, "%s[%d]" % (modname, starts), name, starts - 1 |
|
682 | 738 | |
|
739 | ||
|
683 | 740 | def sources(f, modname): |
|
684 | 741 | """Yields possibly multiple sources from a filepath |
|
685 | 742 | |
|
686 | 743 | input: filepath, modulename |
|
687 | 744 | yields: script(string), modulename, filepath, linenumber |
|
688 | 745 | |
|
689 | 746 | For embedded scripts, the modulename and filepath will be different |
|
690 | 747 | from the function arguments. linenumber is an offset relative to |
|
691 | 748 | the input file. |
|
692 | 749 | """ |
|
693 | 750 | py = False |
|
694 | 751 | if not f.endswith('.t'): |
|
695 | 752 | with open(f, 'rb') as src: |
|
696 | 753 | yield src.read(), modname, f, 0 |
|
697 | 754 | py = True |
|
698 | 755 | if py or f.endswith('.t'): |
|
699 | 756 | with open(f, 'r') as src: |
|
700 | 757 | for script, modname, t, line in embedded(f, modname, src): |
|
701 | 758 | yield script, modname.encode('utf8'), t, line |
|
702 | 759 | |
|
760 | ||
|
703 | 761 | def main(argv): |
|
704 | 762 | if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): |
|
705 | 763 | print('Usage: %s {-|file [file] [file] ...}') |
|
706 | 764 | return 1 |
|
707 | 765 | if argv[1] == '-': |
|
708 | 766 | argv = argv[:1] |
|
709 | 767 | argv.extend(l.rstrip() for l in sys.stdin.readlines()) |
|
710 | 768 | localmodpaths = {} |
|
711 | 769 | used_imports = {} |
|
712 | 770 | any_errors = False |
|
713 | 771 | for source_path in argv[1:]: |
|
714 | 772 | modname = dotted_name_of_path(source_path) |
|
715 | 773 | localmodpaths[modname] = source_path |
|
716 | 774 | localmods = populateextmods(localmodpaths) |
|
717 | 775 | for localmodname, source_path in sorted(localmodpaths.items()): |
|
718 | 776 | if not isinstance(localmodname, bytes): |
|
719 | 777 | # This is only safe because all hg's files are ascii |
|
720 | 778 | localmodname = localmodname.encode('ascii') |
|
721 | 779 | for src, modname, name, line in sources(source_path, localmodname): |
|
722 | 780 | try: |
|
723 | 781 | used_imports[modname] = sorted( |
|
724 |
imported_modules( |
|
|
725 |
|
|
|
726 | for error, lineno in verify_import_convention(modname, src, | |
|
727 | localmods): | |
|
782 | imported_modules( | |
|
783 | src, modname, name, localmods, ignore_nested=True | |
|
784 | ) | |
|
785 | ) | |
|
786 | for error, lineno in verify_import_convention( | |
|
787 | modname, src, localmods | |
|
788 | ): | |
|
728 | 789 | any_errors = True |
|
729 | 790 | print('%s:%d: %s' % (source_path, lineno + line, error)) |
|
730 | 791 | except SyntaxError as e: |
|
731 |
print( |
|
|
732 |
(source_path, e.lineno + line, e) |
|
|
792 | print( | |
|
793 | '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e) | |
|
794 | ) | |
|
733 | 795 | cycles = find_cycles(used_imports) |
|
734 | 796 | if cycles: |
|
735 | 797 | firstmods = set() |
|
736 | 798 | for c in sorted(cycles, key=_cycle_sortkey): |
|
737 | 799 | first = c.split()[0] |
|
738 | 800 | # As a rough cut, ignore any cycle that starts with the |
|
739 | 801 | # same module as some other cycle. Otherwise we see lots |
|
740 | 802 | # of cycles that are effectively duplicates. |
|
741 | 803 | if first in firstmods: |
|
742 | 804 | continue |
|
743 | 805 | print('Import cycle:', c) |
|
744 | 806 | firstmods.add(first) |
|
745 | 807 | any_errors = True |
|
746 | 808 | return any_errors != 0 |
|
747 | 809 | |
|
810 | ||
|
748 | 811 | if __name__ == '__main__': |
|
749 | 812 | sys.exit(int(main(sys.argv))) |
@@ -1,21 +1,22 b'' | |||
|
1 | 1 | # scmutil.py - Mercurial core utility functions |
|
2 | 2 | # |
|
3 | 3 | # Copyright Matt Mackall <mpm@selenic.com> and other |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | from __future__ import absolute_import |
|
8 | 8 | |
|
9 | 9 | from . import repoview |
|
10 | 10 | |
|
11 | ||
|
11 | 12 | def cachetocopy(srcrepo): |
|
12 | 13 | """return the list of cache file valuable to copy during a clone""" |
|
13 | 14 | # In local clones we're copying all nodes, not just served |
|
14 | 15 | # ones. Therefore copy all branch caches over. |
|
15 | 16 | cachefiles = ['branch2'] |
|
16 | 17 | cachefiles += ['branch2-%s' % f for f in repoview.filtertable] |
|
17 | 18 | cachefiles += ['rbc-names-v1', 'rbc-revs-v1'] |
|
18 | 19 | cachefiles += ['tags2'] |
|
19 | 20 | cachefiles += ['tags2-%s' % f for f in repoview.filtertable] |
|
20 | 21 | cachefiles += ['hgtagsfnodes1'] |
|
21 | 22 | return cachefiles |
@@ -1,78 +1,81 b'' | |||
|
1 | 1 | # diffhelper.py - helper routines for patch |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2009 Matt Mackall <mpm@selenic.com> and others |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | from .i18n import _ |
|
11 | 11 | |
|
12 | 12 | from . import ( |
|
13 | 13 | error, |
|
14 | 14 | pycompat, |
|
15 | 15 | ) |
|
16 | 16 | |
|
17 | ||
|
17 | 18 | def addlines(fp, hunk, lena, lenb, a, b): |
|
18 | 19 | """Read lines from fp into the hunk |
|
19 | 20 | |
|
20 | 21 | The hunk is parsed into two arrays, a and b. a gets the old state of |
|
21 | 22 | the text, b gets the new state. The control char from the hunk is saved |
|
22 | 23 | when inserting into a, but not b (for performance while deleting files.) |
|
23 | 24 | """ |
|
24 | 25 | while True: |
|
25 | 26 | todoa = lena - len(a) |
|
26 | 27 | todob = lenb - len(b) |
|
27 | 28 | num = max(todoa, todob) |
|
28 | 29 | if num == 0: |
|
29 | 30 | break |
|
30 | 31 | for i in pycompat.xrange(num): |
|
31 | 32 | s = fp.readline() |
|
32 | 33 | if not s: |
|
33 | 34 | raise error.ParseError(_('incomplete hunk')) |
|
34 | 35 | if s == "\\ No newline at end of file\n": |
|
35 | 36 | fixnewline(hunk, a, b) |
|
36 | 37 | continue |
|
37 | 38 | if s == '\n' or s == '\r\n': |
|
38 | 39 | # Some patches may be missing the control char |
|
39 | 40 | # on empty lines. Supply a leading space. |
|
40 | 41 | s = ' ' + s |
|
41 | 42 | hunk.append(s) |
|
42 | 43 | if s.startswith('+'): |
|
43 | 44 | b.append(s[1:]) |
|
44 | 45 | elif s.startswith('-'): |
|
45 | 46 | a.append(s) |
|
46 | 47 | else: |
|
47 | 48 | b.append(s[1:]) |
|
48 | 49 | a.append(s) |
|
49 | 50 | |
|
51 | ||
|
50 | 52 | def fixnewline(hunk, a, b): |
|
51 | 53 | """Fix up the last lines of a and b when the patch has no newline at EOF""" |
|
52 | 54 | l = hunk[-1] |
|
53 | 55 | # tolerate CRLF in last line |
|
54 | 56 | if l.endswith('\r\n'): |
|
55 | 57 | hline = l[:-2] |
|
56 | 58 | else: |
|
57 | 59 | hline = l[:-1] |
|
58 | 60 | |
|
59 | 61 | if hline.startswith((' ', '+')): |
|
60 | 62 | b[-1] = hline[1:] |
|
61 | 63 | if hline.startswith((' ', '-')): |
|
62 | 64 | a[-1] = hline |
|
63 | 65 | hunk[-1] = hline |
|
64 | 66 | |
|
67 | ||
|
65 | 68 | def testhunk(a, b, bstart): |
|
66 | 69 | """Compare the lines in a with the lines in b |
|
67 | 70 | |
|
68 | 71 | a is assumed to have a control char at the start of each line, this char |
|
69 | 72 | is ignored in the compare. |
|
70 | 73 | """ |
|
71 | 74 | alen = len(a) |
|
72 | 75 | blen = len(b) |
|
73 | 76 | if alen > blen - bstart or bstart < 0: |
|
74 | 77 | return False |
|
75 | 78 | for i in pycompat.xrange(alen): |
|
76 | 79 | if a[i][1:] != b[i + bstart]: |
|
77 | 80 | return False |
|
78 | 81 | return True |
@@ -1,75 +1,84 b'' | |||
|
1 | 1 | # dirstateguard.py - class to allow restoring dirstate after failure |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | from .i18n import _ |
|
11 | 11 | |
|
12 | 12 | from . import ( |
|
13 | 13 | error, |
|
14 | 14 | narrowspec, |
|
15 | 15 | util, |
|
16 | 16 | ) |
|
17 | 17 | |
|
18 | ||
|
18 | 19 | class dirstateguard(util.transactional): |
|
19 | 20 | '''Restore dirstate at unexpected failure. |
|
20 | 21 | |
|
21 | 22 | At the construction, this class does: |
|
22 | 23 | |
|
23 | 24 | - write current ``repo.dirstate`` out, and |
|
24 | 25 | - save ``.hg/dirstate`` into the backup file |
|
25 | 26 | |
|
26 | 27 | This restores ``.hg/dirstate`` from backup file, if ``release()`` |
|
27 | 28 | is invoked before ``close()``. |
|
28 | 29 | |
|
29 | 30 | This just removes the backup file at ``close()`` before ``release()``. |
|
30 | 31 | ''' |
|
31 | 32 | |
|
32 | 33 | def __init__(self, repo, name): |
|
33 | 34 | self._repo = repo |
|
34 | 35 | self._active = False |
|
35 | 36 | self._closed = False |
|
36 | 37 | self._backupname = 'dirstate.backup.%s.%d' % (name, id(self)) |
|
37 |
self._narrowspecbackupname = |
|
|
38 | (name, id(self))) | |
|
38 | self._narrowspecbackupname = 'narrowspec.backup.%s.%d' % ( | |
|
39 | name, | |
|
40 | id(self), | |
|
41 | ) | |
|
39 | 42 | repo.dirstate.savebackup(repo.currenttransaction(), self._backupname) |
|
40 | 43 | narrowspec.savewcbackup(repo, self._narrowspecbackupname) |
|
41 | 44 | self._active = True |
|
42 | 45 | |
|
43 | 46 | def __del__(self): |
|
44 | if self._active: # still active | |
|
47 | if self._active: # still active | |
|
45 | 48 | # this may occur, even if this class is used correctly: |
|
46 | 49 | # for example, releasing other resources like transaction |
|
47 | 50 | # may raise exception before ``dirstateguard.release`` in |
|
48 | 51 | # ``release(tr, ....)``. |
|
49 | 52 | self._abort() |
|
50 | 53 | |
|
51 | 54 | def close(self): |
|
52 | if not self._active: # already inactivated | |
|
53 | msg = (_("can't close already inactivated backup: %s") | |
|
54 | % self._backupname) | |
|
55 | if not self._active: # already inactivated | |
|
56 | msg = ( | |
|
57 | _("can't close already inactivated backup: %s") | |
|
58 | % self._backupname | |
|
59 | ) | |
|
55 | 60 | raise error.Abort(msg) |
|
56 | 61 | |
|
57 |
self._repo.dirstate.clearbackup( |
|
|
58 | self._backupname) | |
|
62 | self._repo.dirstate.clearbackup( | |
|
63 | self._repo.currenttransaction(), self._backupname | |
|
64 | ) | |
|
59 | 65 | narrowspec.clearwcbackup(self._repo, self._narrowspecbackupname) |
|
60 | 66 | self._active = False |
|
61 | 67 | self._closed = True |
|
62 | 68 | |
|
63 | 69 | def _abort(self): |
|
64 | 70 | narrowspec.restorewcbackup(self._repo, self._narrowspecbackupname) |
|
65 |
self._repo.dirstate.restorebackup( |
|
|
66 | self._backupname) | |
|
71 | self._repo.dirstate.restorebackup( | |
|
72 | self._repo.currenttransaction(), self._backupname | |
|
73 | ) | |
|
67 | 74 | self._active = False |
|
68 | 75 | |
|
69 | 76 | def release(self): |
|
70 | 77 | if not self._closed: |
|
71 | if not self._active: # already inactivated | |
|
72 | msg = (_("can't release already inactivated backup: %s") | |
|
73 | % self._backupname) | |
|
78 | if not self._active: # already inactivated | |
|
79 | msg = ( | |
|
80 | _("can't release already inactivated backup: %s") | |
|
81 | % self._backupname | |
|
82 | ) | |
|
74 | 83 | raise error.Abort(msg) |
|
75 | 84 | self._abort() |
@@ -1,121 +1,131 b'' | |||
|
1 | 1 | # httpconnection.py - urllib2 handler for new http support |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
5 | 5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
6 | 6 | # Copyright 2011 Google, Inc. |
|
7 | 7 | # |
|
8 | 8 | # This software may be used and distributed according to the terms of the |
|
9 | 9 | # GNU General Public License version 2 or any later version. |
|
10 | 10 | |
|
11 | 11 | from __future__ import absolute_import |
|
12 | 12 | |
|
13 | 13 | import os |
|
14 | 14 | |
|
15 | 15 | from .i18n import _ |
|
16 | 16 | from . import ( |
|
17 | 17 | pycompat, |
|
18 | 18 | util, |
|
19 | 19 | ) |
|
20 | 20 | |
|
21 | 21 | urlerr = util.urlerr |
|
22 | 22 | urlreq = util.urlreq |
|
23 | 23 | |
|
24 | 24 | # moved here from url.py to avoid a cycle |
|
25 | 25 | class httpsendfile(object): |
|
26 | 26 | """This is a wrapper around the objects returned by python's "open". |
|
27 | 27 | |
|
28 | 28 | Its purpose is to send file-like objects via HTTP. |
|
29 | 29 | It do however not define a __len__ attribute because the length |
|
30 | 30 | might be more than Py_ssize_t can handle. |
|
31 | 31 | """ |
|
32 | 32 | |
|
33 | 33 | def __init__(self, ui, *args, **kwargs): |
|
34 | 34 | self.ui = ui |
|
35 | 35 | self._data = open(*args, **kwargs) |
|
36 | 36 | self.seek = self._data.seek |
|
37 | 37 | self.close = self._data.close |
|
38 | 38 | self.write = self._data.write |
|
39 | 39 | self.length = os.fstat(self._data.fileno()).st_size |
|
40 | 40 | self._pos = 0 |
|
41 | 41 | # We pass double the max for total because we currently have |
|
42 | 42 | # to send the bundle twice in the case of a server that |
|
43 | 43 | # requires authentication. Since we can't know until we try |
|
44 | 44 | # once whether authentication will be required, just lie to |
|
45 | 45 | # the user and maybe the push succeeds suddenly at 50%. |
|
46 |
self._progress = ui.makeprogress( |
|
|
47 |
|
|
|
46 | self._progress = ui.makeprogress( | |
|
47 | _('sending'), unit=_('kb'), total=(self.length // 1024 * 2) | |
|
48 | ) | |
|
48 | 49 | |
|
49 | 50 | def read(self, *args, **kwargs): |
|
50 | 51 | ret = self._data.read(*args, **kwargs) |
|
51 | 52 | if not ret: |
|
52 | 53 | self._progress.complete() |
|
53 | 54 | return ret |
|
54 | 55 | self._pos += len(ret) |
|
55 | 56 | self._progress.update(self._pos // 1024) |
|
56 | 57 | return ret |
|
57 | 58 | |
|
58 | 59 | def __enter__(self): |
|
59 | 60 | return self |
|
60 | 61 | |
|
61 | 62 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
62 | 63 | self.close() |
|
63 | 64 | |
|
65 | ||
|
64 | 66 | # moved here from url.py to avoid a cycle |
|
65 | 67 | def readauthforuri(ui, uri, user): |
|
66 | 68 | uri = pycompat.bytesurl(uri) |
|
67 | 69 | # Read configuration |
|
68 | 70 | groups = {} |
|
69 | 71 | for key, val in ui.configitems('auth'): |
|
70 | 72 | if key in ('cookiefile',): |
|
71 | 73 | continue |
|
72 | 74 | |
|
73 | 75 | if '.' not in key: |
|
74 | 76 | ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) |
|
75 | 77 | continue |
|
76 | 78 | group, setting = key.rsplit('.', 1) |
|
77 | 79 | gdict = groups.setdefault(group, {}) |
|
78 | 80 | if setting in ('username', 'cert', 'key'): |
|
79 | 81 | val = util.expandpath(val) |
|
80 | 82 | gdict[setting] = val |
|
81 | 83 | |
|
82 | 84 | # Find the best match |
|
83 | 85 | scheme, hostpath = uri.split('://', 1) |
|
84 | 86 | bestuser = None |
|
85 | 87 | bestlen = 0 |
|
86 | 88 | bestauth = None |
|
87 | 89 | for group, auth in groups.iteritems(): |
|
88 | 90 | if user and user != auth.get('username', user): |
|
89 | 91 | # If a username was set in the URI, the entry username |
|
90 | 92 | # must either match it or be unset |
|
91 | 93 | continue |
|
92 | 94 | prefix = auth.get('prefix') |
|
93 | 95 | if not prefix: |
|
94 | 96 | continue |
|
95 | 97 | |
|
96 | 98 | prefixurl = util.url(prefix) |
|
97 | 99 | if prefixurl.user and prefixurl.user != user: |
|
98 | 100 | # If a username was set in the prefix, it must match the username in |
|
99 | 101 | # the URI. |
|
100 | 102 | continue |
|
101 | 103 | |
|
102 | 104 | # The URI passed in has been stripped of credentials, so erase the user |
|
103 | 105 | # here to allow simpler matching. |
|
104 | 106 | prefixurl.user = None |
|
105 | 107 | prefix = bytes(prefixurl) |
|
106 | 108 | |
|
107 | 109 | p = prefix.split('://', 1) |
|
108 | 110 | if len(p) > 1: |
|
109 | 111 | schemes, prefix = [p[0]], p[1] |
|
110 | 112 | else: |
|
111 | 113 | schemes = (auth.get('schemes') or 'https').split() |
|
112 | if ((prefix == '*' or hostpath.startswith(prefix)) and | |
|
113 | (len(prefix) > bestlen or (len(prefix) == bestlen and | |
|
114 | not bestuser and 'username' in auth)) | |
|
115 | and scheme in schemes): | |
|
114 | if ( | |
|
115 | (prefix == '*' or hostpath.startswith(prefix)) | |
|
116 | and ( | |
|
117 | len(prefix) > bestlen | |
|
118 | or ( | |
|
119 | len(prefix) == bestlen | |
|
120 | and not bestuser | |
|
121 | and 'username' in auth | |
|
122 | ) | |
|
123 | ) | |
|
124 | and scheme in schemes | |
|
125 | ): | |
|
116 | 126 | bestlen = len(prefix) |
|
117 | 127 | bestauth = group, auth |
|
118 | 128 | bestuser = auth.get('username') |
|
119 | 129 | if user and not bestuser: |
|
120 | 130 | auth['username'] = user |
|
121 | 131 | return bestauth |
@@ -1,92 +1,100 b'' | |||
|
1 | 1 | # minifileset.py - a simple language to select files |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2017 Facebook, Inc. |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | from .i18n import _ |
|
11 | 11 | from . import ( |
|
12 | 12 | error, |
|
13 | 13 | fileset, |
|
14 | 14 | filesetlang, |
|
15 | 15 | pycompat, |
|
16 | 16 | ) |
|
17 | 17 | |
|
18 | ||
|
18 | 19 | def _sizep(x): |
|
19 | 20 | # i18n: "size" is a keyword |
|
20 | 21 | expr = filesetlang.getstring(x, _("size requires an expression")) |
|
21 | 22 | return fileset.sizematcher(expr) |
|
22 | 23 | |
|
24 | ||
|
23 | 25 | def _compile(tree): |
|
24 | 26 | if not tree: |
|
25 | 27 | raise error.ParseError(_("missing argument")) |
|
26 | 28 | op = tree[0] |
|
27 | 29 | if op == 'withstatus': |
|
28 | 30 | return _compile(tree[1]) |
|
29 | 31 | elif op in {'symbol', 'string', 'kindpat'}: |
|
30 | 32 | name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern')) |
|
31 | if name.startswith('**'): # file extension test, ex. "**.tar.gz" | |
|
33 | if name.startswith('**'): # file extension test, ex. "**.tar.gz" | |
|
32 | 34 | ext = name[2:] |
|
33 | 35 | for c in pycompat.bytestr(ext): |
|
34 | 36 | if c in '*{}[]?/\\': |
|
35 | 37 | raise error.ParseError(_('reserved character: %s') % c) |
|
36 | 38 | return lambda n, s: n.endswith(ext) |
|
37 | elif name.startswith('path:'): # directory or full path test | |
|
38 | p = name[5:] # prefix | |
|
39 | elif name.startswith('path:'): # directory or full path test | |
|
40 | p = name[5:] # prefix | |
|
39 | 41 | pl = len(p) |
|
40 |
f = lambda n, s: n.startswith(p) and ( |
|
|
41 |
|
|
|
42 | f = lambda n, s: n.startswith(p) and ( | |
|
43 | len(n) == pl or n[pl : pl + 1] == '/' | |
|
44 | ) | |
|
42 | 45 | return f |
|
43 |
raise error.ParseError( |
|
|
44 | hint=_('paths must be prefixed with "path:"')) | |
|
46 | raise error.ParseError( | |
|
47 | _("unsupported file pattern: %s") % name, | |
|
48 | hint=_('paths must be prefixed with "path:"'), | |
|
49 | ) | |
|
45 | 50 | elif op in {'or', 'patterns'}: |
|
46 | 51 | funcs = [_compile(x) for x in tree[1:]] |
|
47 | 52 | return lambda n, s: any(f(n, s) for f in funcs) |
|
48 | 53 | elif op == 'and': |
|
49 | 54 | func1 = _compile(tree[1]) |
|
50 | 55 | func2 = _compile(tree[2]) |
|
51 | 56 | return lambda n, s: func1(n, s) and func2(n, s) |
|
52 | 57 | elif op == 'not': |
|
53 | 58 | return lambda n, s: not _compile(tree[1])(n, s) |
|
54 | 59 | elif op == 'func': |
|
55 | 60 | symbols = { |
|
56 | 61 | 'all': lambda n, s: True, |
|
57 | 62 | 'none': lambda n, s: False, |
|
58 | 63 | 'size': lambda n, s: _sizep(tree[2])(s), |
|
59 | 64 | } |
|
60 | 65 | |
|
61 | 66 | name = filesetlang.getsymbol(tree[1]) |
|
62 | 67 | if name in symbols: |
|
63 | 68 | return symbols[name] |
|
64 | 69 | |
|
65 | 70 | raise error.UnknownIdentifier(name, symbols.keys()) |
|
66 |
elif op == 'minus': |
|
|
71 | elif op == 'minus': # equivalent to 'x and not y' | |
|
67 | 72 | func1 = _compile(tree[1]) |
|
68 | 73 | func2 = _compile(tree[2]) |
|
69 | 74 | return lambda n, s: func1(n, s) and not func2(n, s) |
|
70 | 75 | elif op == 'list': |
|
71 |
raise error.ParseError( |
|
|
72 | hint=_('see \'hg help "filesets.x or y"\'')) | |
|
76 | raise error.ParseError( | |
|
77 | _("can't use a list in this context"), | |
|
78 | hint=_('see \'hg help "filesets.x or y"\''), | |
|
79 | ) | |
|
73 | 80 | raise error.ProgrammingError('illegal tree: %r' % (tree,)) |
|
74 | 81 | |
|
82 | ||
|
75 | 83 | def compile(text): |
|
76 | 84 | """generate a function (path, size) -> bool from filter specification. |
|
77 | 85 | |
|
78 | 86 | "text" could contain the operators defined by the fileset language for |
|
79 | 87 | common logic operations, and parenthesis for grouping. The supported path |
|
80 | 88 | tests are '**.extname' for file extension test, and '"path:dir/subdir"' |
|
81 | 89 | for prefix test. The ``size()`` predicate is borrowed from filesets to test |
|
82 | 90 | file size. The predicates ``all()`` and ``none()`` are also supported. |
|
83 | 91 | |
|
84 | 92 | '(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for |
|
85 | 93 | example, will catch all php files whose size is greater than 10 MB, all |
|
86 | 94 | files whose name ends with ".zip", and all files under "bin" in the repo |
|
87 | 95 | root except for "bin/README". |
|
88 | 96 | """ |
|
89 | 97 | tree = filesetlang.parse(text) |
|
90 | 98 | tree = filesetlang.analyze(tree) |
|
91 | 99 | tree = filesetlang.optimize(tree) |
|
92 | 100 | return _compile(tree) |
@@ -1,47 +1,49 b'' | |||
|
1 | 1 | # node.py - basic nodeid manipulation for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import binascii |
|
11 | 11 | |
|
12 | 12 | # This ugly style has a noticeable effect in manifest parsing |
|
13 | 13 | hex = binascii.hexlify |
|
14 | 14 | # Adapt to Python 3 API changes. If this ends up showing up in |
|
15 | 15 | # profiles, we can use this version only on Python 3, and forward |
|
16 | 16 | # binascii.unhexlify like we used to on Python 2. |
|
17 | 17 | def bin(s): |
|
18 | 18 | try: |
|
19 | 19 | return binascii.unhexlify(s) |
|
20 | 20 | except binascii.Error as e: |
|
21 | 21 | raise TypeError(e) |
|
22 | 22 | |
|
23 | ||
|
23 | 24 | nullrev = -1 |
|
24 | 25 | # In hex, this is '0000000000000000000000000000000000000000' |
|
25 | 26 | nullid = b"\0" * 20 |
|
26 | 27 | nullhex = hex(nullid) |
|
27 | 28 | |
|
28 | 29 | # Phony node value to stand-in for new files in some uses of |
|
29 | 30 | # manifests. |
|
30 | 31 | # In hex, this is '2121212121212121212121212121212121212121' |
|
31 | 32 | newnodeid = '!!!!!!!!!!!!!!!!!!!!' |
|
32 | 33 | # In hex, this is '3030303030303030303030303030306164646564' |
|
33 | 34 | addednodeid = '000000000000000added' |
|
34 | 35 | # In hex, this is '3030303030303030303030306d6f646966696564' |
|
35 | 36 | modifiednodeid = '000000000000modified' |
|
36 | 37 | |
|
37 | 38 | wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} |
|
38 | 39 | |
|
39 | 40 | # pseudo identifiers for working directory |
|
40 | 41 | # (they are experimental, so don't add too many dependencies on them) |
|
41 |
wdirrev = 0x7 |
|
|
42 | wdirrev = 0x7FFFFFFF | |
|
42 | 43 | # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' |
|
43 | 44 | wdirid = b"\xff" * 20 |
|
44 | 45 | wdirhex = hex(wdirid) |
|
45 | 46 | |
|
47 | ||
|
46 | 48 | def short(node): |
|
47 | 49 | return hex(node[:6]) |
@@ -1,146 +1,155 b'' | |||
|
1 | 1 | # policy.py - module policy logic for Mercurial. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | import sys |
|
12 | 12 | |
|
13 | 13 | # Rules for how modules can be loaded. Values are: |
|
14 | 14 | # |
|
15 | 15 | # c - require C extensions |
|
16 | 16 | # rust+c - require Rust and C extensions |
|
17 | 17 | # rust+c-allow - allow Rust and C extensions with fallback to pure Python |
|
18 | 18 | # for each |
|
19 | 19 | # allow - allow pure Python implementation when C loading fails |
|
20 | 20 | # cffi - required cffi versions (implemented within pure module) |
|
21 | 21 | # cffi-allow - allow pure Python implementation if cffi version is missing |
|
22 | 22 | # py - only load pure Python modules |
|
23 | 23 | # |
|
24 | 24 | # By default, fall back to the pure modules so the in-place build can |
|
25 | 25 | # run without recompiling the C extensions. This will be overridden by |
|
26 | 26 | # __modulepolicy__ generated by setup.py. |
|
27 | 27 | policy = b'allow' |
|
28 | 28 | _packageprefs = { |
|
29 | 29 | # policy: (versioned package, pure package) |
|
30 | 30 | b'c': (r'cext', None), |
|
31 | 31 | b'allow': (r'cext', r'pure'), |
|
32 | 32 | b'cffi': (r'cffi', None), |
|
33 | 33 | b'cffi-allow': (r'cffi', r'pure'), |
|
34 | 34 | b'py': (None, r'pure'), |
|
35 | 35 | # For now, rust policies impact importrust only |
|
36 | 36 | b'rust+c': (r'cext', None), |
|
37 | 37 | b'rust+c-allow': (r'cext', r'pure'), |
|
38 | 38 | } |
|
39 | 39 | |
|
40 | 40 | try: |
|
41 | 41 | from . import __modulepolicy__ |
|
42 | ||
|
42 | 43 | policy = __modulepolicy__.modulepolicy |
|
43 | 44 | except ImportError: |
|
44 | 45 | pass |
|
45 | 46 | |
|
46 | 47 | # PyPy doesn't load C extensions. |
|
47 | 48 | # |
|
48 | 49 | # The canonical way to do this is to test platform.python_implementation(). |
|
49 | 50 | # But we don't import platform and don't bloat for it here. |
|
50 | 51 | if r'__pypy__' in sys.builtin_module_names: |
|
51 | 52 | policy = b'cffi' |
|
52 | 53 | |
|
53 | 54 | # Environment variable can always force settings. |
|
54 | 55 | if sys.version_info[0] >= 3: |
|
55 | 56 | if r'HGMODULEPOLICY' in os.environ: |
|
56 | 57 | policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8') |
|
57 | 58 | else: |
|
58 | 59 | policy = os.environ.get(r'HGMODULEPOLICY', policy) |
|
59 | 60 | |
|
61 | ||
|
60 | 62 | def _importfrom(pkgname, modname): |
|
61 | 63 | # from .<pkgname> import <modname> (where . is looked through this module) |
|
62 | 64 | fakelocals = {} |
|
63 | 65 | pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1) |
|
64 | 66 | try: |
|
65 | 67 | fakelocals[modname] = mod = getattr(pkg, modname) |
|
66 | 68 | except AttributeError: |
|
67 | 69 | raise ImportError(r'cannot import name %s' % modname) |
|
68 | 70 | # force import; fakelocals[modname] may be replaced with the real module |
|
69 | 71 | getattr(mod, r'__doc__', None) |
|
70 | 72 | return fakelocals[modname] |
|
71 | 73 | |
|
74 | ||
|
72 | 75 | # keep in sync with "version" in C modules |
|
73 | 76 | _cextversions = { |
|
74 | 77 | (r'cext', r'base85'): 1, |
|
75 | 78 | (r'cext', r'bdiff'): 3, |
|
76 | 79 | (r'cext', r'mpatch'): 1, |
|
77 | 80 | (r'cext', r'osutil'): 4, |
|
78 | 81 | (r'cext', r'parsers'): 13, |
|
79 | 82 | } |
|
80 | 83 | |
|
81 | 84 | # map import request to other package or module |
|
82 | 85 | _modredirects = { |
|
83 | 86 | (r'cext', r'charencode'): (r'cext', r'parsers'), |
|
84 | 87 | (r'cffi', r'base85'): (r'pure', r'base85'), |
|
85 | 88 | (r'cffi', r'charencode'): (r'pure', r'charencode'), |
|
86 | 89 | (r'cffi', r'parsers'): (r'pure', r'parsers'), |
|
87 | 90 | } |
|
88 | 91 | |
|
92 | ||
|
89 | 93 | def _checkmod(pkgname, modname, mod): |
|
90 | 94 | expected = _cextversions.get((pkgname, modname)) |
|
91 | 95 | actual = getattr(mod, r'version', None) |
|
92 | 96 | if actual != expected: |
|
93 |
raise ImportError( |
|
|
94 | r'(expected version: %d, actual: %r)' | |
|
95 | % (pkgname, modname, expected, actual)) | |
|
97 | raise ImportError( | |
|
98 | r'cannot import module %s.%s ' | |
|
99 | r'(expected version: %d, actual: %r)' | |
|
100 | % (pkgname, modname, expected, actual) | |
|
101 | ) | |
|
102 | ||
|
96 | 103 | |
|
97 | 104 | def importmod(modname): |
|
98 | 105 | """Import module according to policy and check API version""" |
|
99 | 106 | try: |
|
100 | 107 | verpkg, purepkg = _packageprefs[policy] |
|
101 | 108 | except KeyError: |
|
102 | 109 | raise ImportError(r'invalid HGMODULEPOLICY %r' % policy) |
|
103 | 110 | assert verpkg or purepkg |
|
104 | 111 | if verpkg: |
|
105 | 112 | pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname)) |
|
106 | 113 | try: |
|
107 | 114 | mod = _importfrom(pn, mn) |
|
108 | 115 | if pn == verpkg: |
|
109 | 116 | _checkmod(pn, mn, mod) |
|
110 | 117 | return mod |
|
111 | 118 | except ImportError: |
|
112 | 119 | if not purepkg: |
|
113 | 120 | raise |
|
114 | 121 | pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname)) |
|
115 | 122 | return _importfrom(pn, mn) |
|
116 | 123 | |
|
124 | ||
|
117 | 125 | def _isrustpermissive(): |
|
118 | 126 | """Assuming the policy is a Rust one, tell if it's permissive.""" |
|
119 | 127 | return policy.endswith(b'-allow') |
|
120 | 128 | |
|
129 | ||
|
121 | 130 | def importrust(modname, member=None, default=None): |
|
122 | 131 | """Import Rust module according to policy and availability. |
|
123 | 132 | |
|
124 | 133 | If policy isn't a Rust one, this returns `default`. |
|
125 | 134 | |
|
126 | 135 | If either the module or its member is not available, this returns `default` |
|
127 | 136 | if policy is permissive and raises `ImportError` if not. |
|
128 | 137 | """ |
|
129 | 138 | if not policy.startswith(b'rust'): |
|
130 | 139 | return default |
|
131 | 140 | |
|
132 | 141 | try: |
|
133 | 142 | mod = _importfrom(r'rustext', modname) |
|
134 | 143 | except ImportError: |
|
135 | 144 | if _isrustpermissive(): |
|
136 | 145 | return default |
|
137 | 146 | raise |
|
138 | 147 | if member is None: |
|
139 | 148 | return mod |
|
140 | 149 | |
|
141 | 150 | try: |
|
142 | 151 | return getattr(mod, member) |
|
143 | 152 | except AttributeError: |
|
144 | 153 | if _isrustpermissive(): |
|
145 | 154 | return default |
|
146 | 155 | raise ImportError(r"Cannot import name %s" % member) |
@@ -1,61 +1,71 b'' | |||
|
1 | 1 | # pushkey.py - dispatching for pushing and pulling keys |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2010 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | from . import ( |
|
11 | 11 | bookmarks, |
|
12 | 12 | encoding, |
|
13 | 13 | obsolete, |
|
14 | 14 | phases, |
|
15 | 15 | ) |
|
16 | 16 | |
|
17 | ||
|
17 | 18 | def _nslist(repo): |
|
18 | 19 | n = {} |
|
19 | 20 | for k in _namespaces: |
|
20 | 21 | n[k] = "" |
|
21 | 22 | if not obsolete.isenabled(repo, obsolete.exchangeopt): |
|
22 | 23 | n.pop('obsolete') |
|
23 | 24 | return n |
|
24 | 25 | |
|
25 | _namespaces = {"namespaces": (lambda *x: False, _nslist), | |
|
26 | "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks), | |
|
27 | "phases": (phases.pushphase, phases.listphases), | |
|
28 | "obsolete": (obsolete.pushmarker, obsolete.listmarkers), | |
|
29 | } | |
|
26 | ||
|
27 | _namespaces = { | |
|
28 | "namespaces": (lambda *x: False, _nslist), | |
|
29 | "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks), | |
|
30 | "phases": (phases.pushphase, phases.listphases), | |
|
31 | "obsolete": (obsolete.pushmarker, obsolete.listmarkers), | |
|
32 | } | |
|
33 | ||
|
30 | 34 | |
|
31 | 35 | def register(namespace, pushkey, listkeys): |
|
32 | 36 | _namespaces[namespace] = (pushkey, listkeys) |
|
33 | 37 | |
|
38 | ||
|
34 | 39 | def _get(namespace): |
|
35 | 40 | return _namespaces.get(namespace, (lambda *x: False, lambda *x: {})) |
|
36 | 41 | |
|
42 | ||
|
37 | 43 | def push(repo, namespace, key, old, new): |
|
38 | 44 | '''should succeed iff value was old''' |
|
39 | 45 | pk = _get(namespace)[0] |
|
40 | 46 | return pk(repo, key, old, new) |
|
41 | 47 | |
|
48 | ||
|
42 | 49 | def list(repo, namespace): |
|
43 | 50 | '''return a dict''' |
|
44 | 51 | lk = _get(namespace)[1] |
|
45 | 52 | return lk(repo) |
|
46 | 53 | |
|
54 | ||
|
47 | 55 | encode = encoding.fromlocal |
|
48 | 56 | |
|
49 | 57 | decode = encoding.tolocal |
|
50 | 58 | |
|
59 | ||
|
51 | 60 | def encodekeys(keys): |
|
52 | 61 | """encode the content of a pushkey namespace for exchange over the wire""" |
|
53 | 62 | return '\n'.join(['%s\t%s' % (encode(k), encode(v)) for k, v in keys]) |
|
54 | 63 | |
|
64 | ||
|
55 | 65 | def decodekeys(data): |
|
56 | 66 | """decode the content of a pushkey namespace from exchange over the wire""" |
|
57 | 67 | result = {} |
|
58 | 68 | for l in data.splitlines(): |
|
59 | 69 | k, v = l.split('\t') |
|
60 | 70 | result[decode(k)] = decode(v) |
|
61 | 71 | return result |
@@ -1,99 +1,105 b'' | |||
|
1 | 1 | # rcutil.py - utilities about config paths, special config sections etc. |
|
2 | 2 | # |
|
3 | 3 | # Copyright Mercurial Contributors |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | |
|
12 | 12 | from . import ( |
|
13 | 13 | encoding, |
|
14 | 14 | pycompat, |
|
15 | 15 | util, |
|
16 | 16 | ) |
|
17 | 17 | |
|
18 | 18 | if pycompat.iswindows: |
|
19 | 19 | from . import scmwindows as scmplatform |
|
20 | 20 | else: |
|
21 | 21 | from . import scmposix as scmplatform |
|
22 | 22 | |
|
23 | 23 | fallbackpager = scmplatform.fallbackpager |
|
24 | 24 | systemrcpath = scmplatform.systemrcpath |
|
25 | 25 | userrcpath = scmplatform.userrcpath |
|
26 | 26 | |
|
27 | ||
|
27 | 28 | def _expandrcpath(path): |
|
28 | 29 | '''path could be a file or a directory. return a list of file paths''' |
|
29 | 30 | p = util.expandpath(path) |
|
30 | 31 | if os.path.isdir(p): |
|
31 | 32 | join = os.path.join |
|
32 | return sorted(join(p, f) for f, k in util.listdir(p) | |
|
33 |
|
|
|
33 | return sorted( | |
|
34 | join(p, f) for f, k in util.listdir(p) if f.endswith('.rc') | |
|
35 | ) | |
|
34 | 36 | return [p] |
|
35 | 37 | |
|
38 | ||
|
36 | 39 | def envrcitems(env=None): |
|
37 | 40 | '''Return [(section, name, value, source)] config items. |
|
38 | 41 | |
|
39 | 42 | The config items are extracted from environment variables specified by env, |
|
40 | 43 | used to override systemrc, but not userrc. |
|
41 | 44 | |
|
42 | 45 | If env is not provided, encoding.environ will be used. |
|
43 | 46 | ''' |
|
44 | 47 | if env is None: |
|
45 | 48 | env = encoding.environ |
|
46 | 49 | checklist = [ |
|
47 | 50 | ('EDITOR', 'ui', 'editor'), |
|
48 | 51 | ('VISUAL', 'ui', 'editor'), |
|
49 | 52 | ('PAGER', 'pager', 'pager'), |
|
50 | 53 | ] |
|
51 | 54 | result = [] |
|
52 | 55 | for envname, section, configname in checklist: |
|
53 | 56 | if envname not in env: |
|
54 | 57 | continue |
|
55 | 58 | result.append((section, configname, env[envname], '$%s' % envname)) |
|
56 | 59 | return result |
|
57 | 60 | |
|
61 | ||
|
58 | 62 | def defaultrcpath(): |
|
59 | 63 | '''return rc paths in default.d''' |
|
60 | 64 | path = [] |
|
61 | 65 | defaultpath = os.path.join(util.datapath, 'default.d') |
|
62 | 66 | if os.path.isdir(defaultpath): |
|
63 | 67 | path = _expandrcpath(defaultpath) |
|
64 | 68 | return path |
|
65 | 69 | |
|
70 | ||
|
66 | 71 | def rccomponents(): |
|
67 | 72 | '''return an ordered [(type, obj)] about where to load configs. |
|
68 | 73 | |
|
69 | 74 | respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is |
|
70 | 75 | used. if $HGRCPATH is not set, the platform default will be used. |
|
71 | 76 | |
|
72 | 77 | if a directory is provided, *.rc files under it will be used. |
|
73 | 78 | |
|
74 | 79 | type could be either 'path' or 'items', if type is 'path', obj is a string, |
|
75 | 80 | and is the config file path. if type is 'items', obj is a list of (section, |
|
76 | 81 | name, value, source) that should fill the config directly. |
|
77 | 82 | ''' |
|
78 | 83 | envrc = ('items', envrcitems()) |
|
79 | 84 | |
|
80 | 85 | if 'HGRCPATH' in encoding.environ: |
|
81 | 86 | # assume HGRCPATH is all about user configs so environments can be |
|
82 | 87 | # overridden. |
|
83 | 88 | _rccomponents = [envrc] |
|
84 | 89 | for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep): |
|
85 | 90 | if not p: |
|
86 | 91 | continue |
|
87 | 92 | _rccomponents.extend(('path', p) for p in _expandrcpath(p)) |
|
88 | 93 | else: |
|
89 | 94 | normpaths = lambda paths: [('path', os.path.normpath(p)) for p in paths] |
|
90 | 95 | _rccomponents = normpaths(defaultrcpath() + systemrcpath()) |
|
91 | 96 | _rccomponents.append(envrc) |
|
92 | 97 | _rccomponents.extend(normpaths(userrcpath())) |
|
93 | 98 | return _rccomponents |
|
94 | 99 | |
|
100 | ||
|
95 | 101 | def defaultpagerenv(): |
|
96 | 102 | '''return a dict of default environment variables and their values, |
|
97 | 103 | intended to be set before starting a pager. |
|
98 | 104 | ''' |
|
99 | 105 | return {'LESS': 'FRX', 'LV': '-c'} |
@@ -1,53 +1,55 b'' | |||
|
1 | 1 | # rewriteutil.py - utility functions for rewriting changesets |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2017 Octobus <contact@octobus.net> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | from .i18n import _ |
|
11 | 11 | |
|
12 | 12 | from . import ( |
|
13 | 13 | error, |
|
14 | 14 | node, |
|
15 | 15 | obsolete, |
|
16 | 16 | revset, |
|
17 | 17 | ) |
|
18 | 18 | |
|
19 | ||
|
19 | 20 | def precheck(repo, revs, action='rewrite'): |
|
20 | 21 | """check if revs can be rewritten |
|
21 | 22 | action is used to control the error message. |
|
22 | 23 | |
|
23 | 24 | Make sure this function is called after taking the lock. |
|
24 | 25 | """ |
|
25 | 26 | if node.nullrev in revs: |
|
26 |
msg = _("cannot %s null changeset") % |
|
|
27 | msg = _("cannot %s null changeset") % action | |
|
27 | 28 | hint = _("no changeset checked out") |
|
28 | 29 | raise error.Abort(msg, hint=hint) |
|
29 | 30 | |
|
30 | 31 | if len(repo[None].parents()) > 1: |
|
31 | 32 | raise error.Abort(_("cannot %s while merging") % action) |
|
32 | 33 | |
|
33 | 34 | publicrevs = repo.revs('%ld and public()', revs) |
|
34 | 35 | if publicrevs: |
|
35 |
msg = _("cannot %s public changesets") % |
|
|
36 | msg = _("cannot %s public changesets") % action | |
|
36 | 37 | hint = _("see 'hg help phases' for details") |
|
37 | 38 | raise error.Abort(msg, hint=hint) |
|
38 | 39 | |
|
39 | 40 | newunstable = disallowednewunstable(repo, revs) |
|
40 | 41 | if newunstable: |
|
41 | 42 | raise error.Abort(_("cannot %s changeset with children") % action) |
|
42 | 43 | |
|
44 | ||
|
43 | 45 | def disallowednewunstable(repo, revs): |
|
44 | 46 | """Checks whether editing the revs will create new unstable changesets and |
|
45 | 47 | are we allowed to create them. |
|
46 | 48 | |
|
47 | 49 | To allow new unstable changesets, set the config: |
|
48 | 50 | `experimental.evolution.allowunstable=True` |
|
49 | 51 | """ |
|
50 | 52 | allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) |
|
51 | 53 | if allowunstable: |
|
52 | 54 | return revset.baseset() |
|
53 | 55 | return repo.revs("(%ld::) - %ld", revs, revs) |
@@ -1,85 +1,96 b'' | |||
|
1 | 1 | from __future__ import absolute_import |
|
2 | 2 | |
|
3 | 3 | import array |
|
4 | 4 | import errno |
|
5 | 5 | import fcntl |
|
6 | 6 | import os |
|
7 | 7 | import sys |
|
8 | 8 | |
|
9 | 9 | from . import ( |
|
10 | 10 | encoding, |
|
11 | 11 | pycompat, |
|
12 | 12 | util, |
|
13 | 13 | ) |
|
14 | 14 | |
|
15 | 15 | # BSD 'more' escapes ANSI color sequences by default. This can be disabled by |
|
16 | 16 | # $MORE variable, but there's no compatible option with Linux 'more'. Given |
|
17 | 17 | # OS X is widely used and most modern Unix systems would have 'less', setting |
|
18 | 18 | # 'less' as the default seems reasonable. |
|
19 | 19 | fallbackpager = 'less' |
|
20 | 20 | |
|
21 | ||
|
21 | 22 | def _rcfiles(path): |
|
22 | 23 | rcs = [os.path.join(path, 'hgrc')] |
|
23 | 24 | rcdir = os.path.join(path, 'hgrc.d') |
|
24 | 25 | try: |
|
25 |
rcs.extend( |
|
|
26 | for f, kind in util.listdir(rcdir) | |
|
27 | if f.endswith(".rc")]) | |
|
26 | rcs.extend( | |
|
27 | [ | |
|
28 | os.path.join(rcdir, f) | |
|
29 | for f, kind in util.listdir(rcdir) | |
|
30 | if f.endswith(".rc") | |
|
31 | ] | |
|
32 | ) | |
|
28 | 33 | except OSError: |
|
29 | 34 | pass |
|
30 | 35 | return rcs |
|
31 | 36 | |
|
37 | ||
|
32 | 38 | def systemrcpath(): |
|
33 | 39 | path = [] |
|
34 | 40 | if pycompat.sysplatform == 'plan9': |
|
35 | 41 | root = 'lib/mercurial' |
|
36 | 42 | else: |
|
37 | 43 | root = 'etc/mercurial' |
|
38 | 44 | # old mod_python does not set sys.argv |
|
39 | 45 | if len(getattr(sys, 'argv', [])) > 0: |
|
40 | 46 | p = os.path.dirname(os.path.dirname(pycompat.sysargv[0])) |
|
41 | 47 | if p != '/': |
|
42 | 48 | path.extend(_rcfiles(os.path.join(p, root))) |
|
43 | 49 | path.extend(_rcfiles('/' + root)) |
|
44 | 50 | return path |
|
45 | 51 | |
|
52 | ||
|
46 | 53 | def userrcpath(): |
|
47 | 54 | if pycompat.sysplatform == 'plan9': |
|
48 | 55 | return [encoding.environ['home'] + '/lib/hgrc'] |
|
49 | 56 | elif pycompat.isdarwin: |
|
50 | 57 | return [os.path.expanduser('~/.hgrc')] |
|
51 | 58 | else: |
|
52 | 59 | confighome = encoding.environ.get('XDG_CONFIG_HOME') |
|
53 | 60 | if confighome is None or not os.path.isabs(confighome): |
|
54 | 61 | confighome = os.path.expanduser('~/.config') |
|
55 | 62 | |
|
56 | return [os.path.expanduser('~/.hgrc'), | |
|
57 | os.path.join(confighome, 'hg', 'hgrc')] | |
|
63 | return [ | |
|
64 | os.path.expanduser('~/.hgrc'), | |
|
65 | os.path.join(confighome, 'hg', 'hgrc'), | |
|
66 | ] | |
|
67 | ||
|
58 | 68 | |
|
59 | 69 | def termsize(ui): |
|
60 | 70 | try: |
|
61 | 71 | import termios |
|
72 | ||
|
62 | 73 | TIOCGWINSZ = termios.TIOCGWINSZ # unavailable on IRIX (issue3449) |
|
63 | 74 | except (AttributeError, ImportError): |
|
64 | 75 | return 80, 24 |
|
65 | 76 | |
|
66 | 77 | for dev in (ui.ferr, ui.fout, ui.fin): |
|
67 | 78 | try: |
|
68 | 79 | try: |
|
69 | 80 | fd = dev.fileno() |
|
70 | 81 | except AttributeError: |
|
71 | 82 | continue |
|
72 | 83 | if not os.isatty(fd): |
|
73 | 84 | continue |
|
74 | 85 | arri = fcntl.ioctl(fd, TIOCGWINSZ, '\0' * 8) |
|
75 | 86 | height, width = array.array(r'h', arri)[:2] |
|
76 | 87 | if width > 0 and height > 0: |
|
77 | 88 | return width, height |
|
78 | 89 | except ValueError: |
|
79 | 90 | pass |
|
80 | 91 | except IOError as e: |
|
81 | 92 | if e[0] == errno.EINVAL: |
|
82 | 93 | pass |
|
83 | 94 | else: |
|
84 | 95 | raise |
|
85 | 96 | return 80, 24 |
@@ -1,61 +1,65 b'' | |||
|
1 | 1 | from __future__ import absolute_import |
|
2 | 2 | |
|
3 | 3 | import os |
|
4 | 4 | |
|
5 | 5 | from . import ( |
|
6 | 6 | encoding, |
|
7 | 7 | pycompat, |
|
8 | 8 | util, |
|
9 | 9 | win32, |
|
10 | 10 | ) |
|
11 | 11 | |
|
12 | 12 | try: |
|
13 | 13 | import _winreg as winreg |
|
14 | ||
|
14 | 15 | winreg.CloseKey |
|
15 | 16 | except ImportError: |
|
16 | 17 | import winreg |
|
17 | 18 | |
|
18 | 19 | # MS-DOS 'more' is the only pager available by default on Windows. |
|
19 | 20 | fallbackpager = 'more' |
|
20 | 21 | |
|
22 | ||
|
21 | 23 | def systemrcpath(): |
|
22 | 24 | '''return default os-specific hgrc search path''' |
|
23 | 25 | rcpath = [] |
|
24 | 26 | filename = win32.executablepath() |
|
25 | 27 | # Use mercurial.ini found in directory with hg.exe |
|
26 | 28 | progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') |
|
27 | 29 | rcpath.append(progrc) |
|
28 | 30 | # Use hgrc.d found in directory with hg.exe |
|
29 | 31 | progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') |
|
30 | 32 | if os.path.isdir(progrcd): |
|
31 | 33 | for f, kind in util.listdir(progrcd): |
|
32 | 34 | if f.endswith('.rc'): |
|
33 | 35 | rcpath.append(os.path.join(progrcd, f)) |
|
34 | 36 | # else look for a system rcpath in the registry |
|
35 |
value = util.lookupreg( |
|
|
36 |
|
|
|
37 | value = util.lookupreg( | |
|
38 | 'SOFTWARE\\Mercurial', None, winreg.HKEY_LOCAL_MACHINE | |
|
39 | ) | |
|
37 | 40 | if not isinstance(value, str) or not value: |
|
38 | 41 | return rcpath |
|
39 | 42 | value = util.localpath(value) |
|
40 | 43 | for p in value.split(pycompat.ospathsep): |
|
41 | 44 | if p.lower().endswith('mercurial.ini'): |
|
42 | 45 | rcpath.append(p) |
|
43 | 46 | elif os.path.isdir(p): |
|
44 | 47 | for f, kind in util.listdir(p): |
|
45 | 48 | if f.endswith('.rc'): |
|
46 | 49 | rcpath.append(os.path.join(p, f)) |
|
47 | 50 | return rcpath |
|
48 | 51 | |
|
52 | ||
|
49 | 53 | def userrcpath(): |
|
50 | 54 | '''return os-specific hgrc search path to the user dir''' |
|
51 | 55 | home = os.path.expanduser('~') |
|
52 | path = [os.path.join(home, 'mercurial.ini'), | |
|
53 | os.path.join(home, '.hgrc')] | |
|
56 | path = [os.path.join(home, 'mercurial.ini'), os.path.join(home, '.hgrc')] | |
|
54 | 57 | userprofile = encoding.environ.get('USERPROFILE') |
|
55 | 58 | if userprofile and userprofile != home: |
|
56 | 59 | path.append(os.path.join(userprofile, 'mercurial.ini')) |
|
57 | 60 | path.append(os.path.join(userprofile, '.hgrc')) |
|
58 | 61 | return path |
|
59 | 62 | |
|
63 | ||
|
60 | 64 | def termsize(ui): |
|
61 | 65 | return win32.termsize() |
@@ -1,23 +1,24 b'' | |||
|
1 | 1 | # stack.py - Mercurial functions for stack definition |
|
2 | 2 | # |
|
3 | 3 | # Copyright Matt Mackall <mpm@selenic.com> and other |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | ||
|
10 | 11 | def getstack(repo, rev=None): |
|
11 | 12 | """return a sorted smartrev of the stack containing either rev if it is |
|
12 | 13 | not None or the current working directory parent. |
|
13 | 14 | |
|
14 | 15 | The stack will always contain all drafts changesets which are ancestors to |
|
15 | 16 | the revision and are not merges. |
|
16 | 17 | """ |
|
17 | 18 | if rev is None: |
|
18 | 19 | rev = '.' |
|
19 | 20 | |
|
20 | 21 | revspec = 'only(%s) and not public() and not ::merge()' |
|
21 | 22 | revisions = repo.revs(revspec, rev) |
|
22 | 23 | revisions.sort() |
|
23 | 24 | return revisions |
General Comments 0
You need to be logged in to leave comments.
Login now