##// END OF EJS Templates
osutil: switch to policy importer...
Yuya Nishihara -
r32367:a9c71d57 default
parent child Browse files
Show More
@@ -1,107 +1,106 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-py3-compat - check Python 3 compatibility of Mercurial files
3 # check-py3-compat - check Python 3 compatibility of Mercurial files
4 #
4 #
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
10 from __future__ import absolute_import, print_function
11
11
12 import ast
12 import ast
13 import importlib
13 import importlib
14 import os
14 import os
15 import sys
15 import sys
16 import traceback
16 import traceback
17
17
18 # Modules that have both Python and C implementations.
18 # Modules that have both Python and C implementations.
19 _dualmodules = (
19 _dualmodules = (
20 'base85.py',
20 'base85.py',
21 'bdiff.py',
21 'bdiff.py',
22 'diffhelpers.py',
22 'diffhelpers.py',
23 'mpatch.py',
23 'mpatch.py',
24 'osutil.py',
25 'parsers.py',
24 'parsers.py',
26 )
25 )
27
26
28 def check_compat_py2(f):
27 def check_compat_py2(f):
29 """Check Python 3 compatibility for a file with Python 2"""
28 """Check Python 3 compatibility for a file with Python 2"""
30 with open(f, 'rb') as fh:
29 with open(f, 'rb') as fh:
31 content = fh.read()
30 content = fh.read()
32 root = ast.parse(content)
31 root = ast.parse(content)
33
32
34 # Ignore empty files.
33 # Ignore empty files.
35 if not root.body:
34 if not root.body:
36 return
35 return
37
36
38 futures = set()
37 futures = set()
39 haveprint = False
38 haveprint = False
40 for node in ast.walk(root):
39 for node in ast.walk(root):
41 if isinstance(node, ast.ImportFrom):
40 if isinstance(node, ast.ImportFrom):
42 if node.module == '__future__':
41 if node.module == '__future__':
43 futures |= set(n.name for n in node.names)
42 futures |= set(n.name for n in node.names)
44 elif isinstance(node, ast.Print):
43 elif isinstance(node, ast.Print):
45 haveprint = True
44 haveprint = True
46
45
47 if 'absolute_import' not in futures:
46 if 'absolute_import' not in futures:
48 print('%s not using absolute_import' % f)
47 print('%s not using absolute_import' % f)
49 if haveprint and 'print_function' not in futures:
48 if haveprint and 'print_function' not in futures:
50 print('%s requires print_function' % f)
49 print('%s requires print_function' % f)
51
50
52 def check_compat_py3(f):
51 def check_compat_py3(f):
53 """Check Python 3 compatibility of a file with Python 3."""
52 """Check Python 3 compatibility of a file with Python 3."""
54 with open(f, 'rb') as fh:
53 with open(f, 'rb') as fh:
55 content = fh.read()
54 content = fh.read()
56
55
57 try:
56 try:
58 ast.parse(content)
57 ast.parse(content)
59 except SyntaxError as e:
58 except SyntaxError as e:
60 print('%s: invalid syntax: %s' % (f, e))
59 print('%s: invalid syntax: %s' % (f, e))
61 return
60 return
62
61
63 # Try to import the module.
62 # Try to import the module.
64 # For now we only support mercurial.* and hgext.* modules because figuring
63 # For now we only support mercurial.* and hgext.* modules because figuring
65 # out module paths for things not in a package can be confusing.
64 # out module paths for things not in a package can be confusing.
66 if f.startswith(('hgext/', 'mercurial/')) and not f.endswith('__init__.py'):
65 if f.startswith(('hgext/', 'mercurial/')) and not f.endswith('__init__.py'):
67 assert f.endswith('.py')
66 assert f.endswith('.py')
68 name = f.replace('/', '.')[:-3]
67 name = f.replace('/', '.')[:-3]
69 if f.endswith(_dualmodules):
68 if f.endswith(_dualmodules):
70 name = name.replace('.pure.', '.')
69 name = name.replace('.pure.', '.')
71 try:
70 try:
72 importlib.import_module(name)
71 importlib.import_module(name)
73 except Exception as e:
72 except Exception as e:
74 exc_type, exc_value, tb = sys.exc_info()
73 exc_type, exc_value, tb = sys.exc_info()
75 # We walk the stack and ignore frames from our custom importer,
74 # We walk the stack and ignore frames from our custom importer,
76 # import mechanisms, and stdlib modules. This kinda/sorta
75 # import mechanisms, and stdlib modules. This kinda/sorta
77 # emulates CPython behavior in import.c while also attempting
76 # emulates CPython behavior in import.c while also attempting
78 # to pin blame on a Mercurial file.
77 # to pin blame on a Mercurial file.
79 for frame in reversed(traceback.extract_tb(tb)):
78 for frame in reversed(traceback.extract_tb(tb)):
80 if frame.name == '_call_with_frames_removed':
79 if frame.name == '_call_with_frames_removed':
81 continue
80 continue
82 if 'importlib' in frame.filename:
81 if 'importlib' in frame.filename:
83 continue
82 continue
84 if 'mercurial/__init__.py' in frame.filename:
83 if 'mercurial/__init__.py' in frame.filename:
85 continue
84 continue
86 if frame.filename.startswith(sys.prefix):
85 if frame.filename.startswith(sys.prefix):
87 continue
86 continue
88 break
87 break
89
88
90 if frame.filename:
89 if frame.filename:
91 filename = os.path.basename(frame.filename)
90 filename = os.path.basename(frame.filename)
92 print('%s: error importing: <%s> %s (error at %s:%d)' % (
91 print('%s: error importing: <%s> %s (error at %s:%d)' % (
93 f, type(e).__name__, e, filename, frame.lineno))
92 f, type(e).__name__, e, filename, frame.lineno))
94 else:
93 else:
95 print('%s: error importing module: <%s> %s (line %d)' % (
94 print('%s: error importing module: <%s> %s (line %d)' % (
96 f, type(e).__name__, e, frame.lineno))
95 f, type(e).__name__, e, frame.lineno))
97
96
98 if __name__ == '__main__':
97 if __name__ == '__main__':
99 if sys.version_info[0] == 2:
98 if sys.version_info[0] == 2:
100 fn = check_compat_py2
99 fn = check_compat_py2
101 else:
100 else:
102 fn = check_compat_py3
101 fn = check_compat_py3
103
102
104 for f in sys.argv[1:]:
103 for f in sys.argv[1:]:
105 fn(f)
104 fn(f)
106
105
107 sys.exit(0)
106 sys.exit(0)
@@ -1,736 +1,735 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4
4
5 import ast
5 import ast
6 import collections
6 import collections
7 import os
7 import os
8 import re
8 import re
9 import sys
9 import sys
10
10
11 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
11 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
12 # to work when run from a virtualenv. The modules were chosen empirically
12 # to work when run from a virtualenv. The modules were chosen empirically
13 # so that the return value matches the return value without virtualenv.
13 # so that the return value matches the return value without virtualenv.
14 if True: # disable lexical sorting checks
14 if True: # disable lexical sorting checks
15 import BaseHTTPServer
15 import BaseHTTPServer
16 import zlib
16 import zlib
17
17
18 # Whitelist of modules that symbols can be directly imported from.
18 # Whitelist of modules that symbols can be directly imported from.
19 allowsymbolimports = (
19 allowsymbolimports = (
20 '__future__',
20 '__future__',
21 'mercurial.hgweb.common',
21 'mercurial.hgweb.common',
22 'mercurial.hgweb.request',
22 'mercurial.hgweb.request',
23 'mercurial.i18n',
23 'mercurial.i18n',
24 'mercurial.node',
24 'mercurial.node',
25 )
25 )
26
26
27 # Modules that have both Python and C implementations.
27 # Modules that have both Python and C implementations.
28 _dualmodules = (
28 _dualmodules = (
29 'base85.py',
29 'base85.py',
30 'bdiff.py',
30 'bdiff.py',
31 'diffhelpers.py',
31 'diffhelpers.py',
32 'mpatch.py',
32 'mpatch.py',
33 'osutil.py',
34 'parsers.py',
33 'parsers.py',
35 )
34 )
36
35
37 # Modules that must be aliased because they are commonly confused with
36 # Modules that must be aliased because they are commonly confused with
38 # common variables and can create aliasing and readability issues.
37 # common variables and can create aliasing and readability issues.
39 requirealias = {
38 requirealias = {
40 'ui': 'uimod',
39 'ui': 'uimod',
41 }
40 }
42
41
43 def usingabsolute(root):
42 def usingabsolute(root):
44 """Whether absolute imports are being used."""
43 """Whether absolute imports are being used."""
45 if sys.version_info[0] >= 3:
44 if sys.version_info[0] >= 3:
46 return True
45 return True
47
46
48 for node in ast.walk(root):
47 for node in ast.walk(root):
49 if isinstance(node, ast.ImportFrom):
48 if isinstance(node, ast.ImportFrom):
50 if node.module == '__future__':
49 if node.module == '__future__':
51 for n in node.names:
50 for n in node.names:
52 if n.name == 'absolute_import':
51 if n.name == 'absolute_import':
53 return True
52 return True
54
53
55 return False
54 return False
56
55
57 def walklocal(root):
56 def walklocal(root):
58 """Recursively yield all descendant nodes but not in a different scope"""
57 """Recursively yield all descendant nodes but not in a different scope"""
59 todo = collections.deque(ast.iter_child_nodes(root))
58 todo = collections.deque(ast.iter_child_nodes(root))
60 yield root, False
59 yield root, False
61 while todo:
60 while todo:
62 node = todo.popleft()
61 node = todo.popleft()
63 newscope = isinstance(node, ast.FunctionDef)
62 newscope = isinstance(node, ast.FunctionDef)
64 if not newscope:
63 if not newscope:
65 todo.extend(ast.iter_child_nodes(node))
64 todo.extend(ast.iter_child_nodes(node))
66 yield node, newscope
65 yield node, newscope
67
66
68 def dotted_name_of_path(path, trimpure=False):
67 def dotted_name_of_path(path, trimpure=False):
69 """Given a relative path to a source file, return its dotted module name.
68 """Given a relative path to a source file, return its dotted module name.
70
69
71 >>> dotted_name_of_path('mercurial/error.py')
70 >>> dotted_name_of_path('mercurial/error.py')
72 'mercurial.error'
71 'mercurial.error'
73 >>> dotted_name_of_path('mercurial/pure/parsers.py', trimpure=True)
72 >>> dotted_name_of_path('mercurial/pure/parsers.py', trimpure=True)
74 'mercurial.parsers'
73 'mercurial.parsers'
75 >>> dotted_name_of_path('zlibmodule.so')
74 >>> dotted_name_of_path('zlibmodule.so')
76 'zlib'
75 'zlib'
77 """
76 """
78 parts = path.replace(os.sep, '/').split('/')
77 parts = path.replace(os.sep, '/').split('/')
79 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
78 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
80 if parts[-1].endswith('module'):
79 if parts[-1].endswith('module'):
81 parts[-1] = parts[-1][:-6]
80 parts[-1] = parts[-1][:-6]
82 if trimpure:
81 if trimpure:
83 return '.'.join(p for p in parts if p != 'pure')
82 return '.'.join(p for p in parts if p != 'pure')
84 return '.'.join(parts)
83 return '.'.join(parts)
85
84
86 def fromlocalfunc(modulename, localmods):
85 def fromlocalfunc(modulename, localmods):
87 """Get a function to examine which locally defined module the
86 """Get a function to examine which locally defined module the
88 target source imports via a specified name.
87 target source imports via a specified name.
89
88
90 `modulename` is an `dotted_name_of_path()`-ed source file path,
89 `modulename` is an `dotted_name_of_path()`-ed source file path,
91 which may have `.__init__` at the end of it, of the target source.
90 which may have `.__init__` at the end of it, of the target source.
92
91
93 `localmods` is a dict (or set), of which key is an absolute
92 `localmods` is a dict (or set), of which key is an absolute
94 `dotted_name_of_path()`-ed source file path of locally defined (=
93 `dotted_name_of_path()`-ed source file path of locally defined (=
95 Mercurial specific) modules.
94 Mercurial specific) modules.
96
95
97 This function assumes that module names not existing in
96 This function assumes that module names not existing in
98 `localmods` are from the Python standard library.
97 `localmods` are from the Python standard library.
99
98
100 This function returns the function, which takes `name` argument,
99 This function returns the function, which takes `name` argument,
101 and returns `(absname, dottedpath, hassubmod)` tuple if `name`
100 and returns `(absname, dottedpath, hassubmod)` tuple if `name`
102 matches against locally defined module. Otherwise, it returns
101 matches against locally defined module. Otherwise, it returns
103 False.
102 False.
104
103
105 It is assumed that `name` doesn't have `.__init__`.
104 It is assumed that `name` doesn't have `.__init__`.
106
105
107 `absname` is an absolute module name of specified `name`
106 `absname` is an absolute module name of specified `name`
108 (e.g. "hgext.convert"). This can be used to compose prefix for sub
107 (e.g. "hgext.convert"). This can be used to compose prefix for sub
109 modules or so.
108 modules or so.
110
109
111 `dottedpath` is a `dotted_name_of_path()`-ed source file path
110 `dottedpath` is a `dotted_name_of_path()`-ed source file path
112 (e.g. "hgext.convert.__init__") of `name`. This is used to look
111 (e.g. "hgext.convert.__init__") of `name`. This is used to look
113 module up in `localmods` again.
112 module up in `localmods` again.
114
113
115 `hassubmod` is whether it may have sub modules under it (for
114 `hassubmod` is whether it may have sub modules under it (for
116 convenient, even though this is also equivalent to "absname !=
115 convenient, even though this is also equivalent to "absname !=
117 dottednpath")
116 dottednpath")
118
117
119 >>> localmods = {'foo.__init__': True, 'foo.foo1': True,
118 >>> localmods = {'foo.__init__': True, 'foo.foo1': True,
120 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
119 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
121 ... 'baz.__init__': True, 'baz.baz1': True }
120 ... 'baz.__init__': True, 'baz.baz1': True }
122 >>> fromlocal = fromlocalfunc('foo.xxx', localmods)
121 >>> fromlocal = fromlocalfunc('foo.xxx', localmods)
123 >>> # relative
122 >>> # relative
124 >>> fromlocal('foo1')
123 >>> fromlocal('foo1')
125 ('foo.foo1', 'foo.foo1', False)
124 ('foo.foo1', 'foo.foo1', False)
126 >>> fromlocal('bar')
125 >>> fromlocal('bar')
127 ('foo.bar', 'foo.bar.__init__', True)
126 ('foo.bar', 'foo.bar.__init__', True)
128 >>> fromlocal('bar.bar1')
127 >>> fromlocal('bar.bar1')
129 ('foo.bar.bar1', 'foo.bar.bar1', False)
128 ('foo.bar.bar1', 'foo.bar.bar1', False)
130 >>> # absolute
129 >>> # absolute
131 >>> fromlocal('baz')
130 >>> fromlocal('baz')
132 ('baz', 'baz.__init__', True)
131 ('baz', 'baz.__init__', True)
133 >>> fromlocal('baz.baz1')
132 >>> fromlocal('baz.baz1')
134 ('baz.baz1', 'baz.baz1', False)
133 ('baz.baz1', 'baz.baz1', False)
135 >>> # unknown = maybe standard library
134 >>> # unknown = maybe standard library
136 >>> fromlocal('os')
135 >>> fromlocal('os')
137 False
136 False
138 >>> fromlocal(None, 1)
137 >>> fromlocal(None, 1)
139 ('foo', 'foo.__init__', True)
138 ('foo', 'foo.__init__', True)
140 >>> fromlocal('foo1', 1)
139 >>> fromlocal('foo1', 1)
141 ('foo.foo1', 'foo.foo1', False)
140 ('foo.foo1', 'foo.foo1', False)
142 >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
141 >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
143 >>> fromlocal2(None, 2)
142 >>> fromlocal2(None, 2)
144 ('foo', 'foo.__init__', True)
143 ('foo', 'foo.__init__', True)
145 >>> fromlocal2('bar2', 1)
144 >>> fromlocal2('bar2', 1)
146 False
145 False
147 >>> fromlocal2('bar', 2)
146 >>> fromlocal2('bar', 2)
148 ('foo.bar', 'foo.bar.__init__', True)
147 ('foo.bar', 'foo.bar.__init__', True)
149 """
148 """
150 prefix = '.'.join(modulename.split('.')[:-1])
149 prefix = '.'.join(modulename.split('.')[:-1])
151 if prefix:
150 if prefix:
152 prefix += '.'
151 prefix += '.'
153 def fromlocal(name, level=0):
152 def fromlocal(name, level=0):
154 # name is false value when relative imports are used.
153 # name is false value when relative imports are used.
155 if not name:
154 if not name:
156 # If relative imports are used, level must not be absolute.
155 # If relative imports are used, level must not be absolute.
157 assert level > 0
156 assert level > 0
158 candidates = ['.'.join(modulename.split('.')[:-level])]
157 candidates = ['.'.join(modulename.split('.')[:-level])]
159 else:
158 else:
160 if not level:
159 if not level:
161 # Check relative name first.
160 # Check relative name first.
162 candidates = [prefix + name, name]
161 candidates = [prefix + name, name]
163 else:
162 else:
164 candidates = ['.'.join(modulename.split('.')[:-level]) +
163 candidates = ['.'.join(modulename.split('.')[:-level]) +
165 '.' + name]
164 '.' + name]
166
165
167 for n in candidates:
166 for n in candidates:
168 if n in localmods:
167 if n in localmods:
169 return (n, n, False)
168 return (n, n, False)
170 dottedpath = n + '.__init__'
169 dottedpath = n + '.__init__'
171 if dottedpath in localmods:
170 if dottedpath in localmods:
172 return (n, dottedpath, True)
171 return (n, dottedpath, True)
173 return False
172 return False
174 return fromlocal
173 return fromlocal
175
174
176 def list_stdlib_modules():
175 def list_stdlib_modules():
177 """List the modules present in the stdlib.
176 """List the modules present in the stdlib.
178
177
179 >>> mods = set(list_stdlib_modules())
178 >>> mods = set(list_stdlib_modules())
180 >>> 'BaseHTTPServer' in mods
179 >>> 'BaseHTTPServer' in mods
181 True
180 True
182
181
183 os.path isn't really a module, so it's missing:
182 os.path isn't really a module, so it's missing:
184
183
185 >>> 'os.path' in mods
184 >>> 'os.path' in mods
186 False
185 False
187
186
188 sys requires special treatment, because it's baked into the
187 sys requires special treatment, because it's baked into the
189 interpreter, but it should still appear:
188 interpreter, but it should still appear:
190
189
191 >>> 'sys' in mods
190 >>> 'sys' in mods
192 True
191 True
193
192
194 >>> 'collections' in mods
193 >>> 'collections' in mods
195 True
194 True
196
195
197 >>> 'cStringIO' in mods
196 >>> 'cStringIO' in mods
198 True
197 True
199
198
200 >>> 'cffi' in mods
199 >>> 'cffi' in mods
201 True
200 True
202 """
201 """
203 for m in sys.builtin_module_names:
202 for m in sys.builtin_module_names:
204 yield m
203 yield m
205 # These modules only exist on windows, but we should always
204 # These modules only exist on windows, but we should always
206 # consider them stdlib.
205 # consider them stdlib.
207 for m in ['msvcrt', '_winreg']:
206 for m in ['msvcrt', '_winreg']:
208 yield m
207 yield m
209 yield 'builtins' # python3 only
208 yield 'builtins' # python3 only
210 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
209 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
211 yield m
210 yield m
212 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
211 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
213 yield m
212 yield m
214 for m in ['cffi']:
213 for m in ['cffi']:
215 yield m
214 yield m
216 stdlib_prefixes = {sys.prefix, sys.exec_prefix}
215 stdlib_prefixes = {sys.prefix, sys.exec_prefix}
217 # We need to supplement the list of prefixes for the search to work
216 # We need to supplement the list of prefixes for the search to work
218 # when run from within a virtualenv.
217 # when run from within a virtualenv.
219 for mod in (BaseHTTPServer, zlib):
218 for mod in (BaseHTTPServer, zlib):
220 try:
219 try:
221 # Not all module objects have a __file__ attribute.
220 # Not all module objects have a __file__ attribute.
222 filename = mod.__file__
221 filename = mod.__file__
223 except AttributeError:
222 except AttributeError:
224 continue
223 continue
225 dirname = os.path.dirname(filename)
224 dirname = os.path.dirname(filename)
226 for prefix in stdlib_prefixes:
225 for prefix in stdlib_prefixes:
227 if dirname.startswith(prefix):
226 if dirname.startswith(prefix):
228 # Then this directory is redundant.
227 # Then this directory is redundant.
229 break
228 break
230 else:
229 else:
231 stdlib_prefixes.add(dirname)
230 stdlib_prefixes.add(dirname)
232 for libpath in sys.path:
231 for libpath in sys.path:
233 # We want to walk everything in sys.path that starts with
232 # We want to walk everything in sys.path that starts with
234 # something in stdlib_prefixes.
233 # something in stdlib_prefixes.
235 if not any(libpath.startswith(p) for p in stdlib_prefixes):
234 if not any(libpath.startswith(p) for p in stdlib_prefixes):
236 continue
235 continue
237 for top, dirs, files in os.walk(libpath):
236 for top, dirs, files in os.walk(libpath):
238 for i, d in reversed(list(enumerate(dirs))):
237 for i, d in reversed(list(enumerate(dirs))):
239 if (not os.path.exists(os.path.join(top, d, '__init__.py'))
238 if (not os.path.exists(os.path.join(top, d, '__init__.py'))
240 or top == libpath and d in ('hgext', 'mercurial')):
239 or top == libpath and d in ('hgext', 'mercurial')):
241 del dirs[i]
240 del dirs[i]
242 for name in files:
241 for name in files:
243 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
242 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
244 continue
243 continue
245 if name.startswith('__init__.py'):
244 if name.startswith('__init__.py'):
246 full_path = top
245 full_path = top
247 else:
246 else:
248 full_path = os.path.join(top, name)
247 full_path = os.path.join(top, name)
249 rel_path = full_path[len(libpath) + 1:]
248 rel_path = full_path[len(libpath) + 1:]
250 mod = dotted_name_of_path(rel_path)
249 mod = dotted_name_of_path(rel_path)
251 yield mod
250 yield mod
252
251
253 stdlib_modules = set(list_stdlib_modules())
252 stdlib_modules = set(list_stdlib_modules())
254
253
255 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
254 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
256 """Given the source of a file as a string, yield the names
255 """Given the source of a file as a string, yield the names
257 imported by that file.
256 imported by that file.
258
257
259 Args:
258 Args:
260 source: The python source to examine as a string.
259 source: The python source to examine as a string.
261 modulename: of specified python source (may have `__init__`)
260 modulename: of specified python source (may have `__init__`)
262 localmods: dict of locally defined module names (may have `__init__`)
261 localmods: dict of locally defined module names (may have `__init__`)
263 ignore_nested: If true, import statements that do not start in
262 ignore_nested: If true, import statements that do not start in
264 column zero will be ignored.
263 column zero will be ignored.
265
264
266 Returns:
265 Returns:
267 A list of absolute module names imported by the given source.
266 A list of absolute module names imported by the given source.
268
267
269 >>> f = 'foo/xxx.py'
268 >>> f = 'foo/xxx.py'
270 >>> modulename = 'foo.xxx'
269 >>> modulename = 'foo.xxx'
271 >>> localmods = {'foo.__init__': True,
270 >>> localmods = {'foo.__init__': True,
272 ... 'foo.foo1': True, 'foo.foo2': True,
271 ... 'foo.foo1': True, 'foo.foo2': True,
273 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
272 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
274 ... 'baz.__init__': True, 'baz.baz1': True }
273 ... 'baz.__init__': True, 'baz.baz1': True }
275 >>> # standard library (= not locally defined ones)
274 >>> # standard library (= not locally defined ones)
276 >>> sorted(imported_modules(
275 >>> sorted(imported_modules(
277 ... 'from stdlib1 import foo, bar; import stdlib2',
276 ... 'from stdlib1 import foo, bar; import stdlib2',
278 ... modulename, f, localmods))
277 ... modulename, f, localmods))
279 []
278 []
280 >>> # relative importing
279 >>> # relative importing
281 >>> sorted(imported_modules(
280 >>> sorted(imported_modules(
282 ... 'import foo1; from bar import bar1',
281 ... 'import foo1; from bar import bar1',
283 ... modulename, f, localmods))
282 ... modulename, f, localmods))
284 ['foo.bar.bar1', 'foo.foo1']
283 ['foo.bar.bar1', 'foo.foo1']
285 >>> sorted(imported_modules(
284 >>> sorted(imported_modules(
286 ... 'from bar.bar1 import name1, name2, name3',
285 ... 'from bar.bar1 import name1, name2, name3',
287 ... modulename, f, localmods))
286 ... modulename, f, localmods))
288 ['foo.bar.bar1']
287 ['foo.bar.bar1']
289 >>> # absolute importing
288 >>> # absolute importing
290 >>> sorted(imported_modules(
289 >>> sorted(imported_modules(
291 ... 'from baz import baz1, name1',
290 ... 'from baz import baz1, name1',
292 ... modulename, f, localmods))
291 ... modulename, f, localmods))
293 ['baz.__init__', 'baz.baz1']
292 ['baz.__init__', 'baz.baz1']
294 >>> # mixed importing, even though it shouldn't be recommended
293 >>> # mixed importing, even though it shouldn't be recommended
295 >>> sorted(imported_modules(
294 >>> sorted(imported_modules(
296 ... 'import stdlib, foo1, baz',
295 ... 'import stdlib, foo1, baz',
297 ... modulename, f, localmods))
296 ... modulename, f, localmods))
298 ['baz.__init__', 'foo.foo1']
297 ['baz.__init__', 'foo.foo1']
299 >>> # ignore_nested
298 >>> # ignore_nested
300 >>> sorted(imported_modules(
299 >>> sorted(imported_modules(
301 ... '''import foo
300 ... '''import foo
302 ... def wat():
301 ... def wat():
303 ... import bar
302 ... import bar
304 ... ''', modulename, f, localmods))
303 ... ''', modulename, f, localmods))
305 ['foo.__init__', 'foo.bar.__init__']
304 ['foo.__init__', 'foo.bar.__init__']
306 >>> sorted(imported_modules(
305 >>> sorted(imported_modules(
307 ... '''import foo
306 ... '''import foo
308 ... def wat():
307 ... def wat():
309 ... import bar
308 ... import bar
310 ... ''', modulename, f, localmods, ignore_nested=True))
309 ... ''', modulename, f, localmods, ignore_nested=True))
311 ['foo.__init__']
310 ['foo.__init__']
312 """
311 """
313 fromlocal = fromlocalfunc(modulename, localmods)
312 fromlocal = fromlocalfunc(modulename, localmods)
314 for node in ast.walk(ast.parse(source, f)):
313 for node in ast.walk(ast.parse(source, f)):
315 if ignore_nested and getattr(node, 'col_offset', 0) > 0:
314 if ignore_nested and getattr(node, 'col_offset', 0) > 0:
316 continue
315 continue
317 if isinstance(node, ast.Import):
316 if isinstance(node, ast.Import):
318 for n in node.names:
317 for n in node.names:
319 found = fromlocal(n.name)
318 found = fromlocal(n.name)
320 if not found:
319 if not found:
321 # this should import standard library
320 # this should import standard library
322 continue
321 continue
323 yield found[1]
322 yield found[1]
324 elif isinstance(node, ast.ImportFrom):
323 elif isinstance(node, ast.ImportFrom):
325 found = fromlocal(node.module, node.level)
324 found = fromlocal(node.module, node.level)
326 if not found:
325 if not found:
327 # this should import standard library
326 # this should import standard library
328 continue
327 continue
329
328
330 absname, dottedpath, hassubmod = found
329 absname, dottedpath, hassubmod = found
331 if not hassubmod:
330 if not hassubmod:
332 # "dottedpath" is not a package; must be imported
331 # "dottedpath" is not a package; must be imported
333 yield dottedpath
332 yield dottedpath
334 # examination of "node.names" should be redundant
333 # examination of "node.names" should be redundant
335 # e.g.: from mercurial.node import nullid, nullrev
334 # e.g.: from mercurial.node import nullid, nullrev
336 continue
335 continue
337
336
338 modnotfound = False
337 modnotfound = False
339 prefix = absname + '.'
338 prefix = absname + '.'
340 for n in node.names:
339 for n in node.names:
341 found = fromlocal(prefix + n.name)
340 found = fromlocal(prefix + n.name)
342 if not found:
341 if not found:
343 # this should be a function or a property of "node.module"
342 # this should be a function or a property of "node.module"
344 modnotfound = True
343 modnotfound = True
345 continue
344 continue
346 yield found[1]
345 yield found[1]
347 if modnotfound:
346 if modnotfound:
348 # "dottedpath" is a package, but imported because of non-module
347 # "dottedpath" is a package, but imported because of non-module
349 # lookup
348 # lookup
350 yield dottedpath
349 yield dottedpath
351
350
352 def verify_import_convention(module, source, localmods):
351 def verify_import_convention(module, source, localmods):
353 """Verify imports match our established coding convention.
352 """Verify imports match our established coding convention.
354
353
355 We have 2 conventions: legacy and modern. The modern convention is in
354 We have 2 conventions: legacy and modern. The modern convention is in
356 effect when using absolute imports.
355 effect when using absolute imports.
357
356
358 The legacy convention only looks for mixed imports. The modern convention
357 The legacy convention only looks for mixed imports. The modern convention
359 is much more thorough.
358 is much more thorough.
360 """
359 """
361 root = ast.parse(source)
360 root = ast.parse(source)
362 absolute = usingabsolute(root)
361 absolute = usingabsolute(root)
363
362
364 if absolute:
363 if absolute:
365 return verify_modern_convention(module, root, localmods)
364 return verify_modern_convention(module, root, localmods)
366 else:
365 else:
367 return verify_stdlib_on_own_line(root)
366 return verify_stdlib_on_own_line(root)
368
367
369 def verify_modern_convention(module, root, localmods, root_col_offset=0):
368 def verify_modern_convention(module, root, localmods, root_col_offset=0):
370 """Verify a file conforms to the modern import convention rules.
369 """Verify a file conforms to the modern import convention rules.
371
370
372 The rules of the modern convention are:
371 The rules of the modern convention are:
373
372
374 * Ordering is stdlib followed by local imports. Each group is lexically
373 * Ordering is stdlib followed by local imports. Each group is lexically
375 sorted.
374 sorted.
376 * Importing multiple modules via "import X, Y" is not allowed: use
375 * Importing multiple modules via "import X, Y" is not allowed: use
377 separate import statements.
376 separate import statements.
378 * Importing multiple modules via "from X import ..." is allowed if using
377 * Importing multiple modules via "from X import ..." is allowed if using
379 parenthesis and one entry per line.
378 parenthesis and one entry per line.
380 * Only 1 relative import statement per import level ("from .", "from ..")
379 * Only 1 relative import statement per import level ("from .", "from ..")
381 is allowed.
380 is allowed.
382 * Relative imports from higher levels must occur before lower levels. e.g.
381 * Relative imports from higher levels must occur before lower levels. e.g.
383 "from .." must be before "from .".
382 "from .." must be before "from .".
384 * Imports from peer packages should use relative import (e.g. do not
383 * Imports from peer packages should use relative import (e.g. do not
385 "import mercurial.foo" from a "mercurial.*" module).
384 "import mercurial.foo" from a "mercurial.*" module).
386 * Symbols can only be imported from specific modules (see
385 * Symbols can only be imported from specific modules (see
387 `allowsymbolimports`). For other modules, first import the module then
386 `allowsymbolimports`). For other modules, first import the module then
388 assign the symbol to a module-level variable. In addition, these imports
387 assign the symbol to a module-level variable. In addition, these imports
389 must be performed before other local imports. This rule only
388 must be performed before other local imports. This rule only
390 applies to import statements outside of any blocks.
389 applies to import statements outside of any blocks.
391 * Relative imports from the standard library are not allowed.
390 * Relative imports from the standard library are not allowed.
392 * Certain modules must be aliased to alternate names to avoid aliasing
391 * Certain modules must be aliased to alternate names to avoid aliasing
393 and readability problems. See `requirealias`.
392 and readability problems. See `requirealias`.
394 """
393 """
395 topmodule = module.split('.')[0]
394 topmodule = module.split('.')[0]
396 fromlocal = fromlocalfunc(module, localmods)
395 fromlocal = fromlocalfunc(module, localmods)
397
396
398 # Whether a local/non-stdlib import has been performed.
397 # Whether a local/non-stdlib import has been performed.
399 seenlocal = None
398 seenlocal = None
400 # Whether a local/non-stdlib, non-symbol import has been seen.
399 # Whether a local/non-stdlib, non-symbol import has been seen.
401 seennonsymbollocal = False
400 seennonsymbollocal = False
402 # The last name to be imported (for sorting).
401 # The last name to be imported (for sorting).
403 lastname = None
402 lastname = None
404 laststdlib = None
403 laststdlib = None
405 # Relative import levels encountered so far.
404 # Relative import levels encountered so far.
406 seenlevels = set()
405 seenlevels = set()
407
406
408 for node, newscope in walklocal(root):
407 for node, newscope in walklocal(root):
409 def msg(fmt, *args):
408 def msg(fmt, *args):
410 return (fmt % args, node.lineno)
409 return (fmt % args, node.lineno)
411 if newscope:
410 if newscope:
412 # Check for local imports in function
411 # Check for local imports in function
413 for r in verify_modern_convention(module, node, localmods,
412 for r in verify_modern_convention(module, node, localmods,
414 node.col_offset + 4):
413 node.col_offset + 4):
415 yield r
414 yield r
416 elif isinstance(node, ast.Import):
415 elif isinstance(node, ast.Import):
417 # Disallow "import foo, bar" and require separate imports
416 # Disallow "import foo, bar" and require separate imports
418 # for each module.
417 # for each module.
419 if len(node.names) > 1:
418 if len(node.names) > 1:
420 yield msg('multiple imported names: %s',
419 yield msg('multiple imported names: %s',
421 ', '.join(n.name for n in node.names))
420 ', '.join(n.name for n in node.names))
422
421
423 name = node.names[0].name
422 name = node.names[0].name
424 asname = node.names[0].asname
423 asname = node.names[0].asname
425
424
426 stdlib = name in stdlib_modules
425 stdlib = name in stdlib_modules
427
426
428 # Ignore sorting rules on imports inside blocks.
427 # Ignore sorting rules on imports inside blocks.
429 if node.col_offset == root_col_offset:
428 if node.col_offset == root_col_offset:
430 if lastname and name < lastname and laststdlib == stdlib:
429 if lastname and name < lastname and laststdlib == stdlib:
431 yield msg('imports not lexically sorted: %s < %s',
430 yield msg('imports not lexically sorted: %s < %s',
432 name, lastname)
431 name, lastname)
433
432
434 lastname = name
433 lastname = name
435 laststdlib = stdlib
434 laststdlib = stdlib
436
435
437 # stdlib imports should be before local imports.
436 # stdlib imports should be before local imports.
438 if stdlib and seenlocal and node.col_offset == root_col_offset:
437 if stdlib and seenlocal and node.col_offset == root_col_offset:
439 yield msg('stdlib import "%s" follows local import: %s',
438 yield msg('stdlib import "%s" follows local import: %s',
440 name, seenlocal)
439 name, seenlocal)
441
440
442 if not stdlib:
441 if not stdlib:
443 seenlocal = name
442 seenlocal = name
444
443
445 # Import of sibling modules should use relative imports.
444 # Import of sibling modules should use relative imports.
446 topname = name.split('.')[0]
445 topname = name.split('.')[0]
447 if topname == topmodule:
446 if topname == topmodule:
448 yield msg('import should be relative: %s', name)
447 yield msg('import should be relative: %s', name)
449
448
450 if name in requirealias and asname != requirealias[name]:
449 if name in requirealias and asname != requirealias[name]:
451 yield msg('%s module must be "as" aliased to %s',
450 yield msg('%s module must be "as" aliased to %s',
452 name, requirealias[name])
451 name, requirealias[name])
453
452
454 elif isinstance(node, ast.ImportFrom):
453 elif isinstance(node, ast.ImportFrom):
455 # Resolve the full imported module name.
454 # Resolve the full imported module name.
456 if node.level > 0:
455 if node.level > 0:
457 fullname = '.'.join(module.split('.')[:-node.level])
456 fullname = '.'.join(module.split('.')[:-node.level])
458 if node.module:
457 if node.module:
459 fullname += '.%s' % node.module
458 fullname += '.%s' % node.module
460 else:
459 else:
461 assert node.module
460 assert node.module
462 fullname = node.module
461 fullname = node.module
463
462
464 topname = fullname.split('.')[0]
463 topname = fullname.split('.')[0]
465 if topname == topmodule:
464 if topname == topmodule:
466 yield msg('import should be relative: %s', fullname)
465 yield msg('import should be relative: %s', fullname)
467
466
468 # __future__ is special since it needs to come first and use
467 # __future__ is special since it needs to come first and use
469 # symbol import.
468 # symbol import.
470 if fullname != '__future__':
469 if fullname != '__future__':
471 if not fullname or fullname in stdlib_modules:
470 if not fullname or fullname in stdlib_modules:
472 yield msg('relative import of stdlib module')
471 yield msg('relative import of stdlib module')
473 else:
472 else:
474 seenlocal = fullname
473 seenlocal = fullname
475
474
476 # Direct symbol import is only allowed from certain modules and
475 # Direct symbol import is only allowed from certain modules and
477 # must occur before non-symbol imports.
476 # must occur before non-symbol imports.
478 found = fromlocal(node.module, node.level)
477 found = fromlocal(node.module, node.level)
479 if found and found[2]: # node.module is a package
478 if found and found[2]: # node.module is a package
480 prefix = found[0] + '.'
479 prefix = found[0] + '.'
481 symbols = [n.name for n in node.names
480 symbols = [n.name for n in node.names
482 if not fromlocal(prefix + n.name)]
481 if not fromlocal(prefix + n.name)]
483 else:
482 else:
484 symbols = [n.name for n in node.names]
483 symbols = [n.name for n in node.names]
485 if node.module and node.col_offset == root_col_offset:
484 if node.module and node.col_offset == root_col_offset:
486 if symbols and fullname not in allowsymbolimports:
485 if symbols and fullname not in allowsymbolimports:
487 yield msg('direct symbol import %s from %s',
486 yield msg('direct symbol import %s from %s',
488 ', '.join(symbols), fullname)
487 ', '.join(symbols), fullname)
489
488
490 if symbols and seennonsymbollocal:
489 if symbols and seennonsymbollocal:
491 yield msg('symbol import follows non-symbol import: %s',
490 yield msg('symbol import follows non-symbol import: %s',
492 fullname)
491 fullname)
493 if not symbols and fullname not in stdlib_modules:
492 if not symbols and fullname not in stdlib_modules:
494 seennonsymbollocal = True
493 seennonsymbollocal = True
495
494
496 if not node.module:
495 if not node.module:
497 assert node.level
496 assert node.level
498
497
499 # Only allow 1 group per level.
498 # Only allow 1 group per level.
500 if (node.level in seenlevels
499 if (node.level in seenlevels
501 and node.col_offset == root_col_offset):
500 and node.col_offset == root_col_offset):
502 yield msg('multiple "from %s import" statements',
501 yield msg('multiple "from %s import" statements',
503 '.' * node.level)
502 '.' * node.level)
504
503
505 # Higher-level groups come before lower-level groups.
504 # Higher-level groups come before lower-level groups.
506 if any(node.level > l for l in seenlevels):
505 if any(node.level > l for l in seenlevels):
507 yield msg('higher-level import should come first: %s',
506 yield msg('higher-level import should come first: %s',
508 fullname)
507 fullname)
509
508
510 seenlevels.add(node.level)
509 seenlevels.add(node.level)
511
510
512 # Entries in "from .X import ( ... )" lists must be lexically
511 # Entries in "from .X import ( ... )" lists must be lexically
513 # sorted.
512 # sorted.
514 lastentryname = None
513 lastentryname = None
515
514
516 for n in node.names:
515 for n in node.names:
517 if lastentryname and n.name < lastentryname:
516 if lastentryname and n.name < lastentryname:
518 yield msg('imports from %s not lexically sorted: %s < %s',
517 yield msg('imports from %s not lexically sorted: %s < %s',
519 fullname, n.name, lastentryname)
518 fullname, n.name, lastentryname)
520
519
521 lastentryname = n.name
520 lastentryname = n.name
522
521
523 if n.name in requirealias and n.asname != requirealias[n.name]:
522 if n.name in requirealias and n.asname != requirealias[n.name]:
524 yield msg('%s from %s must be "as" aliased to %s',
523 yield msg('%s from %s must be "as" aliased to %s',
525 n.name, fullname, requirealias[n.name])
524 n.name, fullname, requirealias[n.name])
526
525
527 def verify_stdlib_on_own_line(root):
526 def verify_stdlib_on_own_line(root):
528 """Given some python source, verify that stdlib imports are done
527 """Given some python source, verify that stdlib imports are done
529 in separate statements from relative local module imports.
528 in separate statements from relative local module imports.
530
529
531 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
530 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
532 [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
531 [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
533 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
532 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
534 []
533 []
535 >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
534 >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
536 []
535 []
537 """
536 """
538 for node in ast.walk(root):
537 for node in ast.walk(root):
539 if isinstance(node, ast.Import):
538 if isinstance(node, ast.Import):
540 from_stdlib = {False: [], True: []}
539 from_stdlib = {False: [], True: []}
541 for n in node.names:
540 for n in node.names:
542 from_stdlib[n.name in stdlib_modules].append(n.name)
541 from_stdlib[n.name in stdlib_modules].append(n.name)
543 if from_stdlib[True] and from_stdlib[False]:
542 if from_stdlib[True] and from_stdlib[False]:
544 yield ('mixed imports\n stdlib: %s\n relative: %s' %
543 yield ('mixed imports\n stdlib: %s\n relative: %s' %
545 (', '.join(sorted(from_stdlib[True])),
544 (', '.join(sorted(from_stdlib[True])),
546 ', '.join(sorted(from_stdlib[False]))), node.lineno)
545 ', '.join(sorted(from_stdlib[False]))), node.lineno)
547
546
548 class CircularImport(Exception):
547 class CircularImport(Exception):
549 pass
548 pass
550
549
551 def checkmod(mod, imports):
550 def checkmod(mod, imports):
552 shortest = {}
551 shortest = {}
553 visit = [[mod]]
552 visit = [[mod]]
554 while visit:
553 while visit:
555 path = visit.pop(0)
554 path = visit.pop(0)
556 for i in sorted(imports.get(path[-1], [])):
555 for i in sorted(imports.get(path[-1], [])):
557 if len(path) < shortest.get(i, 1000):
556 if len(path) < shortest.get(i, 1000):
558 shortest[i] = len(path)
557 shortest[i] = len(path)
559 if i in path:
558 if i in path:
560 if i == path[0]:
559 if i == path[0]:
561 raise CircularImport(path)
560 raise CircularImport(path)
562 continue
561 continue
563 visit.append(path + [i])
562 visit.append(path + [i])
564
563
565 def rotatecycle(cycle):
564 def rotatecycle(cycle):
566 """arrange a cycle so that the lexicographically first module listed first
565 """arrange a cycle so that the lexicographically first module listed first
567
566
568 >>> rotatecycle(['foo', 'bar'])
567 >>> rotatecycle(['foo', 'bar'])
569 ['bar', 'foo', 'bar']
568 ['bar', 'foo', 'bar']
570 """
569 """
571 lowest = min(cycle)
570 lowest = min(cycle)
572 idx = cycle.index(lowest)
571 idx = cycle.index(lowest)
573 return cycle[idx:] + cycle[:idx] + [lowest]
572 return cycle[idx:] + cycle[:idx] + [lowest]
574
573
575 def find_cycles(imports):
574 def find_cycles(imports):
576 """Find cycles in an already-loaded import graph.
575 """Find cycles in an already-loaded import graph.
577
576
578 All module names recorded in `imports` should be absolute one.
577 All module names recorded in `imports` should be absolute one.
579
578
580 >>> from __future__ import print_function
579 >>> from __future__ import print_function
581 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
580 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
582 ... 'top.bar': ['top.baz', 'sys'],
581 ... 'top.bar': ['top.baz', 'sys'],
583 ... 'top.baz': ['top.foo'],
582 ... 'top.baz': ['top.foo'],
584 ... 'top.qux': ['top.foo']}
583 ... 'top.qux': ['top.foo']}
585 >>> print('\\n'.join(sorted(find_cycles(imports))))
584 >>> print('\\n'.join(sorted(find_cycles(imports))))
586 top.bar -> top.baz -> top.foo -> top.bar
585 top.bar -> top.baz -> top.foo -> top.bar
587 top.foo -> top.qux -> top.foo
586 top.foo -> top.qux -> top.foo
588 """
587 """
589 cycles = set()
588 cycles = set()
590 for mod in sorted(imports.keys()):
589 for mod in sorted(imports.keys()):
591 try:
590 try:
592 checkmod(mod, imports)
591 checkmod(mod, imports)
593 except CircularImport as e:
592 except CircularImport as e:
594 cycle = e.args[0]
593 cycle = e.args[0]
595 cycles.add(" -> ".join(rotatecycle(cycle)))
594 cycles.add(" -> ".join(rotatecycle(cycle)))
596 return cycles
595 return cycles
597
596
598 def _cycle_sortkey(c):
597 def _cycle_sortkey(c):
599 return len(c), c
598 return len(c), c
600
599
601 def embedded(f, modname, src):
600 def embedded(f, modname, src):
602 """Extract embedded python code
601 """Extract embedded python code
603
602
604 >>> def test(fn, lines):
603 >>> def test(fn, lines):
605 ... for s, m, f, l in embedded(fn, "example", lines):
604 ... for s, m, f, l in embedded(fn, "example", lines):
606 ... print("%s %s %s" % (m, f, l))
605 ... print("%s %s %s" % (m, f, l))
607 ... print(repr(s))
606 ... print(repr(s))
608 >>> lines = [
607 >>> lines = [
609 ... 'comment',
608 ... 'comment',
610 ... ' >>> from __future__ import print_function',
609 ... ' >>> from __future__ import print_function',
611 ... " >>> ' multiline",
610 ... " >>> ' multiline",
612 ... " ... string'",
611 ... " ... string'",
613 ... ' ',
612 ... ' ',
614 ... 'comment',
613 ... 'comment',
615 ... ' $ cat > foo.py <<EOF',
614 ... ' $ cat > foo.py <<EOF',
616 ... ' > from __future__ import print_function',
615 ... ' > from __future__ import print_function',
617 ... ' > EOF',
616 ... ' > EOF',
618 ... ]
617 ... ]
619 >>> test("example.t", lines)
618 >>> test("example.t", lines)
620 example[2] doctest.py 2
619 example[2] doctest.py 2
621 "from __future__ import print_function\\n' multiline\\nstring'\\n"
620 "from __future__ import print_function\\n' multiline\\nstring'\\n"
622 example[7] foo.py 7
621 example[7] foo.py 7
623 'from __future__ import print_function\\n'
622 'from __future__ import print_function\\n'
624 """
623 """
625 inlinepython = 0
624 inlinepython = 0
626 shpython = 0
625 shpython = 0
627 script = []
626 script = []
628 prefix = 6
627 prefix = 6
629 t = ''
628 t = ''
630 n = 0
629 n = 0
631 for l in src:
630 for l in src:
632 n += 1
631 n += 1
633 if not l.endswith(b'\n'):
632 if not l.endswith(b'\n'):
634 l += b'\n'
633 l += b'\n'
635 if l.startswith(b' >>> '): # python inlines
634 if l.startswith(b' >>> '): # python inlines
636 if shpython:
635 if shpython:
637 print("%s:%d: Parse Error" % (f, n))
636 print("%s:%d: Parse Error" % (f, n))
638 if not inlinepython:
637 if not inlinepython:
639 # We've just entered a Python block.
638 # We've just entered a Python block.
640 inlinepython = n
639 inlinepython = n
641 t = 'doctest.py'
640 t = 'doctest.py'
642 script.append(l[prefix:])
641 script.append(l[prefix:])
643 continue
642 continue
644 if l.startswith(b' ... '): # python inlines
643 if l.startswith(b' ... '): # python inlines
645 script.append(l[prefix:])
644 script.append(l[prefix:])
646 continue
645 continue
647 cat = re.search(r"\$ \s*cat\s*>\s*(\S+\.py)\s*<<\s*EOF", l)
646 cat = re.search(r"\$ \s*cat\s*>\s*(\S+\.py)\s*<<\s*EOF", l)
648 if cat:
647 if cat:
649 if inlinepython:
648 if inlinepython:
650 yield ''.join(script), ("%s[%d]" %
649 yield ''.join(script), ("%s[%d]" %
651 (modname, inlinepython)), t, inlinepython
650 (modname, inlinepython)), t, inlinepython
652 script = []
651 script = []
653 inlinepython = 0
652 inlinepython = 0
654 shpython = n
653 shpython = n
655 t = cat.group(1)
654 t = cat.group(1)
656 continue
655 continue
657 if shpython and l.startswith(b' > '): # sh continuation
656 if shpython and l.startswith(b' > '): # sh continuation
658 if l == b' > EOF\n':
657 if l == b' > EOF\n':
659 yield ''.join(script), ("%s[%d]" %
658 yield ''.join(script), ("%s[%d]" %
660 (modname, shpython)), t, shpython
659 (modname, shpython)), t, shpython
661 script = []
660 script = []
662 shpython = 0
661 shpython = 0
663 else:
662 else:
664 script.append(l[4:])
663 script.append(l[4:])
665 continue
664 continue
666 if inlinepython and l == b' \n':
665 if inlinepython and l == b' \n':
667 yield ''.join(script), ("%s[%d]" %
666 yield ''.join(script), ("%s[%d]" %
668 (modname, inlinepython)), t, inlinepython
667 (modname, inlinepython)), t, inlinepython
669 script = []
668 script = []
670 inlinepython = 0
669 inlinepython = 0
671 continue
670 continue
672
671
673 def sources(f, modname):
672 def sources(f, modname):
674 """Yields possibly multiple sources from a filepath
673 """Yields possibly multiple sources from a filepath
675
674
676 input: filepath, modulename
675 input: filepath, modulename
677 yields: script(string), modulename, filepath, linenumber
676 yields: script(string), modulename, filepath, linenumber
678
677
679 For embedded scripts, the modulename and filepath will be different
678 For embedded scripts, the modulename and filepath will be different
680 from the function arguments. linenumber is an offset relative to
679 from the function arguments. linenumber is an offset relative to
681 the input file.
680 the input file.
682 """
681 """
683 py = False
682 py = False
684 if not f.endswith('.t'):
683 if not f.endswith('.t'):
685 with open(f) as src:
684 with open(f) as src:
686 yield src.read(), modname, f, 0
685 yield src.read(), modname, f, 0
687 py = True
686 py = True
688 if py or f.endswith('.t'):
687 if py or f.endswith('.t'):
689 with open(f) as src:
688 with open(f) as src:
690 for script, modname, t, line in embedded(f, modname, src):
689 for script, modname, t, line in embedded(f, modname, src):
691 yield script, modname, t, line
690 yield script, modname, t, line
692
691
693 def main(argv):
692 def main(argv):
694 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
693 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
695 print('Usage: %s {-|file [file] [file] ...}')
694 print('Usage: %s {-|file [file] [file] ...}')
696 return 1
695 return 1
697 if argv[1] == '-':
696 if argv[1] == '-':
698 argv = argv[:1]
697 argv = argv[:1]
699 argv.extend(l.rstrip() for l in sys.stdin.readlines())
698 argv.extend(l.rstrip() for l in sys.stdin.readlines())
700 localmods = {}
699 localmods = {}
701 used_imports = {}
700 used_imports = {}
702 any_errors = False
701 any_errors = False
703 for source_path in argv[1:]:
702 for source_path in argv[1:]:
704 trimpure = source_path.endswith(_dualmodules)
703 trimpure = source_path.endswith(_dualmodules)
705 modname = dotted_name_of_path(source_path, trimpure=trimpure)
704 modname = dotted_name_of_path(source_path, trimpure=trimpure)
706 localmods[modname] = source_path
705 localmods[modname] = source_path
707 for localmodname, source_path in sorted(localmods.items()):
706 for localmodname, source_path in sorted(localmods.items()):
708 for src, modname, name, line in sources(source_path, localmodname):
707 for src, modname, name, line in sources(source_path, localmodname):
709 try:
708 try:
710 used_imports[modname] = sorted(
709 used_imports[modname] = sorted(
711 imported_modules(src, modname, name, localmods,
710 imported_modules(src, modname, name, localmods,
712 ignore_nested=True))
711 ignore_nested=True))
713 for error, lineno in verify_import_convention(modname, src,
712 for error, lineno in verify_import_convention(modname, src,
714 localmods):
713 localmods):
715 any_errors = True
714 any_errors = True
716 print('%s:%d: %s' % (source_path, lineno + line, error))
715 print('%s:%d: %s' % (source_path, lineno + line, error))
717 except SyntaxError as e:
716 except SyntaxError as e:
718 print('%s:%d: SyntaxError: %s' %
717 print('%s:%d: SyntaxError: %s' %
719 (source_path, e.lineno + line, e))
718 (source_path, e.lineno + line, e))
720 cycles = find_cycles(used_imports)
719 cycles = find_cycles(used_imports)
721 if cycles:
720 if cycles:
722 firstmods = set()
721 firstmods = set()
723 for c in sorted(cycles, key=_cycle_sortkey):
722 for c in sorted(cycles, key=_cycle_sortkey):
724 first = c.split()[0]
723 first = c.split()[0]
725 # As a rough cut, ignore any cycle that starts with the
724 # As a rough cut, ignore any cycle that starts with the
726 # same module as some other cycle. Otherwise we see lots
725 # same module as some other cycle. Otherwise we see lots
727 # of cycles that are effectively duplicates.
726 # of cycles that are effectively duplicates.
728 if first in firstmods:
727 if first in firstmods:
729 continue
728 continue
730 print('Import cycle:', c)
729 print('Import cycle:', c)
731 firstmods.add(first)
730 firstmods.add(first)
732 any_errors = True
731 any_errors = True
733 return any_errors != 0
732 return any_errors != 0
734
733
735 if __name__ == '__main__':
734 if __name__ == '__main__':
736 sys.exit(int(main(sys.argv)))
735 sys.exit(int(main(sys.argv)))
@@ -1,35 +1,35 b''
1 <?xml version="1.0" encoding="utf-8"?>
1 <?xml version="1.0" encoding="utf-8"?>
2 <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
2 <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
3
3
4 <?include guids.wxi ?>
4 <?include guids.wxi ?>
5 <?include defines.wxi ?>
5 <?include defines.wxi ?>
6
6
7 <Fragment>
7 <Fragment>
8 <DirectoryRef Id="INSTALLDIR" FileSource="$(var.SourceDir)">
8 <DirectoryRef Id="INSTALLDIR" FileSource="$(var.SourceDir)">
9 <Component Id="distOutput" Guid="$(var.dist.guid)" Win64='$(var.IsX64)'>
9 <Component Id="distOutput" Guid="$(var.dist.guid)" Win64='$(var.IsX64)'>
10 <File Name="python27.dll" KeyPath="yes" />
10 <File Name="python27.dll" KeyPath="yes" />
11 </Component>
11 </Component>
12 <Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib">
12 <Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib">
13 <Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'>
13 <Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'>
14 <File Name="library.zip" KeyPath="yes" />
14 <File Name="library.zip" KeyPath="yes" />
15 <File Name="mercurial.base85.pyd" />
15 <File Name="mercurial.base85.pyd" />
16 <File Name="mercurial.bdiff.pyd" />
16 <File Name="mercurial.bdiff.pyd" />
17 <File Name="mercurial.diffhelpers.pyd" />
17 <File Name="mercurial.diffhelpers.pyd" />
18 <File Name="mercurial.mpatch.pyd" />
18 <File Name="mercurial.mpatch.pyd" />
19 <File Name="mercurial.osutil.pyd" />
19 <File Name="mercurial.cext.osutil.pyd" />
20 <File Name="mercurial.parsers.pyd" />
20 <File Name="mercurial.parsers.pyd" />
21 <File Name="pyexpat.pyd" />
21 <File Name="pyexpat.pyd" />
22 <File Name="bz2.pyd" />
22 <File Name="bz2.pyd" />
23 <File Name="select.pyd" />
23 <File Name="select.pyd" />
24 <File Name="unicodedata.pyd" />
24 <File Name="unicodedata.pyd" />
25 <File Name="_ctypes.pyd" />
25 <File Name="_ctypes.pyd" />
26 <File Name="_elementtree.pyd" />
26 <File Name="_elementtree.pyd" />
27 <File Name="_hashlib.pyd" />
27 <File Name="_hashlib.pyd" />
28 <File Name="_socket.pyd" />
28 <File Name="_socket.pyd" />
29 <File Name="_ssl.pyd" />
29 <File Name="_ssl.pyd" />
30 </Component>
30 </Component>
31 </Directory>
31 </Directory>
32 </DirectoryRef>
32 </DirectoryRef>
33 </Fragment>
33 </Fragment>
34
34
35 </Wix>
35 </Wix>
@@ -1,404 +1,403 b''
1 # __init__.py - Startup and module loading logic for Mercurial.
1 # __init__.py - Startup and module loading logic for Mercurial.
2 #
2 #
3 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import imp
10 import imp
11 import os
11 import os
12 import sys
12 import sys
13 import zipimport
13 import zipimport
14
14
15 from . import (
15 from . import (
16 policy
16 policy
17 )
17 )
18
18
19 __all__ = []
19 __all__ = []
20
20
21 modulepolicy = policy.policy
21 modulepolicy = policy.policy
22
22
23 # Modules that have both Python and C implementations. See also the
23 # Modules that have both Python and C implementations. See also the
24 # set of .py files under mercurial/pure/.
24 # set of .py files under mercurial/pure/.
25 _dualmodules = {
25 _dualmodules = {
26 'mercurial.base85',
26 'mercurial.base85',
27 'mercurial.bdiff',
27 'mercurial.bdiff',
28 'mercurial.diffhelpers',
28 'mercurial.diffhelpers',
29 'mercurial.mpatch',
29 'mercurial.mpatch',
30 'mercurial.osutil',
31 'mercurial.parsers',
30 'mercurial.parsers',
32 }
31 }
33
32
34 class hgimporter(object):
33 class hgimporter(object):
35 """Object that conforms to import hook interface defined in PEP-302."""
34 """Object that conforms to import hook interface defined in PEP-302."""
36 def find_module(self, name, path=None):
35 def find_module(self, name, path=None):
37 # We only care about modules that have both C and pure implementations.
36 # We only care about modules that have both C and pure implementations.
38 if name in _dualmodules:
37 if name in _dualmodules:
39 return self
38 return self
40 return None
39 return None
41
40
42 def load_module(self, name):
41 def load_module(self, name):
43 mod = sys.modules.get(name, None)
42 mod = sys.modules.get(name, None)
44 if mod:
43 if mod:
45 return mod
44 return mod
46
45
47 mercurial = sys.modules['mercurial']
46 mercurial = sys.modules['mercurial']
48
47
49 # The zip importer behaves sufficiently differently from the default
48 # The zip importer behaves sufficiently differently from the default
50 # importer to warrant its own code path.
49 # importer to warrant its own code path.
51 loader = getattr(mercurial, '__loader__', None)
50 loader = getattr(mercurial, '__loader__', None)
52 if isinstance(loader, zipimport.zipimporter):
51 if isinstance(loader, zipimport.zipimporter):
53 def ziploader(*paths):
52 def ziploader(*paths):
54 """Obtain a zipimporter for a directory under the main zip."""
53 """Obtain a zipimporter for a directory under the main zip."""
55 path = os.path.join(loader.archive, *paths)
54 path = os.path.join(loader.archive, *paths)
56 zl = sys.path_importer_cache.get(path)
55 zl = sys.path_importer_cache.get(path)
57 if not zl:
56 if not zl:
58 zl = zipimport.zipimporter(path)
57 zl = zipimport.zipimporter(path)
59 return zl
58 return zl
60
59
61 try:
60 try:
62 if modulepolicy in policy.policynoc:
61 if modulepolicy in policy.policynoc:
63 raise ImportError()
62 raise ImportError()
64
63
65 zl = ziploader('mercurial')
64 zl = ziploader('mercurial')
66 mod = zl.load_module(name)
65 mod = zl.load_module(name)
67 # Unlike imp, ziploader doesn't expose module metadata that
66 # Unlike imp, ziploader doesn't expose module metadata that
68 # indicates the type of module. So just assume what we found
67 # indicates the type of module. So just assume what we found
69 # is OK (even though it could be a pure Python module).
68 # is OK (even though it could be a pure Python module).
70 except ImportError:
69 except ImportError:
71 if modulepolicy == b'c':
70 if modulepolicy == b'c':
72 raise
71 raise
73 zl = ziploader('mercurial', 'pure')
72 zl = ziploader('mercurial', 'pure')
74 mod = zl.load_module(name)
73 mod = zl.load_module(name)
75
74
76 sys.modules[name] = mod
75 sys.modules[name] = mod
77 return mod
76 return mod
78
77
79 # Unlike the default importer which searches special locations and
78 # Unlike the default importer which searches special locations and
80 # sys.path, we only look in the directory where "mercurial" was
79 # sys.path, we only look in the directory where "mercurial" was
81 # imported from.
80 # imported from.
82
81
83 # imp.find_module doesn't support submodules (modules with ".").
82 # imp.find_module doesn't support submodules (modules with ".").
84 # Instead you have to pass the parent package's __path__ attribute
83 # Instead you have to pass the parent package's __path__ attribute
85 # as the path argument.
84 # as the path argument.
86 stem = name.split('.')[-1]
85 stem = name.split('.')[-1]
87
86
88 try:
87 try:
89 if modulepolicy in policy.policynoc:
88 if modulepolicy in policy.policynoc:
90 raise ImportError()
89 raise ImportError()
91
90
92 modinfo = imp.find_module(stem, mercurial.__path__)
91 modinfo = imp.find_module(stem, mercurial.__path__)
93
92
94 # The Mercurial installer used to copy files from
93 # The Mercurial installer used to copy files from
95 # mercurial/pure/*.py to mercurial/*.py. Therefore, it's possible
94 # mercurial/pure/*.py to mercurial/*.py. Therefore, it's possible
96 # for some installations to have .py files under mercurial/*.
95 # for some installations to have .py files under mercurial/*.
97 # Loading Python modules when we expected C versions could result
96 # Loading Python modules when we expected C versions could result
98 # in a) poor performance b) loading a version from a previous
97 # in a) poor performance b) loading a version from a previous
99 # Mercurial version, potentially leading to incompatibility. Either
98 # Mercurial version, potentially leading to incompatibility. Either
100 # scenario is bad. So we verify that modules loaded from
99 # scenario is bad. So we verify that modules loaded from
101 # mercurial/* are C extensions. If the current policy allows the
100 # mercurial/* are C extensions. If the current policy allows the
102 # loading of .py modules, the module will be re-imported from
101 # loading of .py modules, the module will be re-imported from
103 # mercurial/pure/* below.
102 # mercurial/pure/* below.
104 if modinfo[2][2] != imp.C_EXTENSION:
103 if modinfo[2][2] != imp.C_EXTENSION:
105 raise ImportError('.py version of %s found where C '
104 raise ImportError('.py version of %s found where C '
106 'version should exist' % name)
105 'version should exist' % name)
107
106
108 except ImportError:
107 except ImportError:
109 if modulepolicy == b'c':
108 if modulepolicy == b'c':
110 raise
109 raise
111
110
112 # Could not load the C extension and pure Python is allowed. So
111 # Could not load the C extension and pure Python is allowed. So
113 # try to load them.
112 # try to load them.
114 from . import pure
113 from . import pure
115 modinfo = imp.find_module(stem, pure.__path__)
114 modinfo = imp.find_module(stem, pure.__path__)
116 if not modinfo:
115 if not modinfo:
117 raise ImportError('could not find mercurial module %s' %
116 raise ImportError('could not find mercurial module %s' %
118 name)
117 name)
119
118
120 mod = imp.load_module(name, *modinfo)
119 mod = imp.load_module(name, *modinfo)
121 sys.modules[name] = mod
120 sys.modules[name] = mod
122 return mod
121 return mod
123
122
124 # Python 3 uses a custom module loader that transforms source code between
123 # Python 3 uses a custom module loader that transforms source code between
125 # source file reading and compilation. This is done by registering a custom
124 # source file reading and compilation. This is done by registering a custom
126 # finder that changes the spec for Mercurial modules to use a custom loader.
125 # finder that changes the spec for Mercurial modules to use a custom loader.
127 if sys.version_info[0] >= 3:
126 if sys.version_info[0] >= 3:
128 from . import pure
127 from . import pure
129 import importlib
128 import importlib
130 import io
129 import io
131 import token
130 import token
132 import tokenize
131 import tokenize
133
132
134 class hgpathentryfinder(importlib.abc.MetaPathFinder):
133 class hgpathentryfinder(importlib.abc.MetaPathFinder):
135 """A sys.meta_path finder that uses a custom module loader."""
134 """A sys.meta_path finder that uses a custom module loader."""
136 def find_spec(self, fullname, path, target=None):
135 def find_spec(self, fullname, path, target=None):
137 # Only handle Mercurial-related modules.
136 # Only handle Mercurial-related modules.
138 if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
137 if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
139 return None
138 return None
140 # zstd is already dual-version clean, don't try and mangle it
139 # zstd is already dual-version clean, don't try and mangle it
141 if fullname.startswith('mercurial.zstd'):
140 if fullname.startswith('mercurial.zstd'):
142 return None
141 return None
143
142
144 # This assumes Python 3 doesn't support loading C modules.
143 # This assumes Python 3 doesn't support loading C modules.
145 if fullname in _dualmodules:
144 if fullname in _dualmodules:
146 stem = fullname.split('.')[-1]
145 stem = fullname.split('.')[-1]
147 fullname = 'mercurial.pure.%s' % stem
146 fullname = 'mercurial.pure.%s' % stem
148 target = pure
147 target = pure
149 assert len(path) == 1
148 assert len(path) == 1
150 path = [os.path.join(path[0], 'pure')]
149 path = [os.path.join(path[0], 'pure')]
151
150
152 # Try to find the module using other registered finders.
151 # Try to find the module using other registered finders.
153 spec = None
152 spec = None
154 for finder in sys.meta_path:
153 for finder in sys.meta_path:
155 if finder == self:
154 if finder == self:
156 continue
155 continue
157
156
158 spec = finder.find_spec(fullname, path, target=target)
157 spec = finder.find_spec(fullname, path, target=target)
159 if spec:
158 if spec:
160 break
159 break
161
160
162 # This is a Mercurial-related module but we couldn't find it
161 # This is a Mercurial-related module but we couldn't find it
163 # using the previously-registered finders. This likely means
162 # using the previously-registered finders. This likely means
164 # the module doesn't exist.
163 # the module doesn't exist.
165 if not spec:
164 if not spec:
166 return None
165 return None
167
166
168 if (fullname.startswith('mercurial.pure.')
167 if (fullname.startswith('mercurial.pure.')
169 and fullname.replace('.pure.', '.') in _dualmodules):
168 and fullname.replace('.pure.', '.') in _dualmodules):
170 spec.name = spec.name.replace('.pure.', '.')
169 spec.name = spec.name.replace('.pure.', '.')
171
170
172 # TODO need to support loaders from alternate specs, like zip
171 # TODO need to support loaders from alternate specs, like zip
173 # loaders.
172 # loaders.
174 spec.loader = hgloader(spec.name, spec.origin)
173 spec.loader = hgloader(spec.name, spec.origin)
175 return spec
174 return spec
176
175
177 def replacetokens(tokens, fullname):
176 def replacetokens(tokens, fullname):
178 """Transform a stream of tokens from raw to Python 3.
177 """Transform a stream of tokens from raw to Python 3.
179
178
180 It is called by the custom module loading machinery to rewrite
179 It is called by the custom module loading machinery to rewrite
181 source/tokens between source decoding and compilation.
180 source/tokens between source decoding and compilation.
182
181
183 Returns a generator of possibly rewritten tokens.
182 Returns a generator of possibly rewritten tokens.
184
183
185 The input token list may be mutated as part of processing. However,
184 The input token list may be mutated as part of processing. However,
186 its changes do not necessarily match the output token stream.
185 its changes do not necessarily match the output token stream.
187
186
188 REMEMBER TO CHANGE ``BYTECODEHEADER`` WHEN CHANGING THIS FUNCTION
187 REMEMBER TO CHANGE ``BYTECODEHEADER`` WHEN CHANGING THIS FUNCTION
189 OR CACHED FILES WON'T GET INVALIDATED PROPERLY.
188 OR CACHED FILES WON'T GET INVALIDATED PROPERLY.
190 """
189 """
191 futureimpline = False
190 futureimpline = False
192
191
193 # The following utility functions access the tokens list and i index of
192 # The following utility functions access the tokens list and i index of
194 # the for i, t enumerate(tokens) loop below
193 # the for i, t enumerate(tokens) loop below
195 def _isop(j, *o):
194 def _isop(j, *o):
196 """Assert that tokens[j] is an OP with one of the given values"""
195 """Assert that tokens[j] is an OP with one of the given values"""
197 try:
196 try:
198 return tokens[j].type == token.OP and tokens[j].string in o
197 return tokens[j].type == token.OP and tokens[j].string in o
199 except IndexError:
198 except IndexError:
200 return False
199 return False
201
200
202 def _findargnofcall(n):
201 def _findargnofcall(n):
203 """Find arg n of a call expression (start at 0)
202 """Find arg n of a call expression (start at 0)
204
203
205 Returns index of the first token of that argument, or None if
204 Returns index of the first token of that argument, or None if
206 there is not that many arguments.
205 there is not that many arguments.
207
206
208 Assumes that token[i + 1] is '('.
207 Assumes that token[i + 1] is '('.
209
208
210 """
209 """
211 nested = 0
210 nested = 0
212 for j in range(i + 2, len(tokens)):
211 for j in range(i + 2, len(tokens)):
213 if _isop(j, ')', ']', '}'):
212 if _isop(j, ')', ']', '}'):
214 # end of call, tuple, subscription or dict / set
213 # end of call, tuple, subscription or dict / set
215 nested -= 1
214 nested -= 1
216 if nested < 0:
215 if nested < 0:
217 return None
216 return None
218 elif n == 0:
217 elif n == 0:
219 # this is the starting position of arg
218 # this is the starting position of arg
220 return j
219 return j
221 elif _isop(j, '(', '[', '{'):
220 elif _isop(j, '(', '[', '{'):
222 nested += 1
221 nested += 1
223 elif _isop(j, ',') and nested == 0:
222 elif _isop(j, ',') and nested == 0:
224 n -= 1
223 n -= 1
225
224
226 return None
225 return None
227
226
228 def _ensureunicode(j):
227 def _ensureunicode(j):
229 """Make sure the token at j is a unicode string
228 """Make sure the token at j is a unicode string
230
229
231 This rewrites a string token to include the unicode literal prefix
230 This rewrites a string token to include the unicode literal prefix
232 so the string transformer won't add the byte prefix.
231 so the string transformer won't add the byte prefix.
233
232
234 Ignores tokens that are not strings. Assumes bounds checking has
233 Ignores tokens that are not strings. Assumes bounds checking has
235 already been done.
234 already been done.
236
235
237 """
236 """
238 st = tokens[j]
237 st = tokens[j]
239 if st.type == token.STRING and st.string.startswith(("'", '"')):
238 if st.type == token.STRING and st.string.startswith(("'", '"')):
240 tokens[j] = st._replace(string='u%s' % st.string)
239 tokens[j] = st._replace(string='u%s' % st.string)
241
240
242 for i, t in enumerate(tokens):
241 for i, t in enumerate(tokens):
243 # Convert most string literals to byte literals. String literals
242 # Convert most string literals to byte literals. String literals
244 # in Python 2 are bytes. String literals in Python 3 are unicode.
243 # in Python 2 are bytes. String literals in Python 3 are unicode.
245 # Most strings in Mercurial are bytes and unicode strings are rare.
244 # Most strings in Mercurial are bytes and unicode strings are rare.
246 # Rather than rewrite all string literals to use ``b''`` to indicate
245 # Rather than rewrite all string literals to use ``b''`` to indicate
247 # byte strings, we apply this token transformer to insert the ``b``
246 # byte strings, we apply this token transformer to insert the ``b``
248 # prefix nearly everywhere.
247 # prefix nearly everywhere.
249 if t.type == token.STRING:
248 if t.type == token.STRING:
250 s = t.string
249 s = t.string
251
250
252 # Preserve docstrings as string literals. This is inconsistent
251 # Preserve docstrings as string literals. This is inconsistent
253 # with regular unprefixed strings. However, the
252 # with regular unprefixed strings. However, the
254 # "from __future__" parsing (which allows a module docstring to
253 # "from __future__" parsing (which allows a module docstring to
255 # exist before it) doesn't properly handle the docstring if it
254 # exist before it) doesn't properly handle the docstring if it
256 # is b''' prefixed, leading to a SyntaxError. We leave all
255 # is b''' prefixed, leading to a SyntaxError. We leave all
257 # docstrings as unprefixed to avoid this. This means Mercurial
256 # docstrings as unprefixed to avoid this. This means Mercurial
258 # components touching docstrings need to handle unicode,
257 # components touching docstrings need to handle unicode,
259 # unfortunately.
258 # unfortunately.
260 if s[0:3] in ("'''", '"""'):
259 if s[0:3] in ("'''", '"""'):
261 yield t
260 yield t
262 continue
261 continue
263
262
264 # If the first character isn't a quote, it is likely a string
263 # If the first character isn't a quote, it is likely a string
265 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
264 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
266 if s[0] not in ("'", '"'):
265 if s[0] not in ("'", '"'):
267 yield t
266 yield t
268 continue
267 continue
269
268
270 # String literal. Prefix to make a b'' string.
269 # String literal. Prefix to make a b'' string.
271 yield t._replace(string='b%s' % t.string)
270 yield t._replace(string='b%s' % t.string)
272 continue
271 continue
273
272
274 # Insert compatibility imports at "from __future__ import" line.
273 # Insert compatibility imports at "from __future__ import" line.
275 # No '\n' should be added to preserve line numbers.
274 # No '\n' should be added to preserve line numbers.
276 if (t.type == token.NAME and t.string == 'import' and
275 if (t.type == token.NAME and t.string == 'import' and
277 all(u.type == token.NAME for u in tokens[i - 2:i]) and
276 all(u.type == token.NAME for u in tokens[i - 2:i]) and
278 [u.string for u in tokens[i - 2:i]] == ['from', '__future__']):
277 [u.string for u in tokens[i - 2:i]] == ['from', '__future__']):
279 futureimpline = True
278 futureimpline = True
280 if t.type == token.NEWLINE and futureimpline:
279 if t.type == token.NEWLINE and futureimpline:
281 futureimpline = False
280 futureimpline = False
282 if fullname == 'mercurial.pycompat':
281 if fullname == 'mercurial.pycompat':
283 yield t
282 yield t
284 continue
283 continue
285 r, c = t.start
284 r, c = t.start
286 l = (b'; from mercurial.pycompat import '
285 l = (b'; from mercurial.pycompat import '
287 b'delattr, getattr, hasattr, setattr, xrange, '
286 b'delattr, getattr, hasattr, setattr, xrange, '
288 b'open, unicode\n')
287 b'open, unicode\n')
289 for u in tokenize.tokenize(io.BytesIO(l).readline):
288 for u in tokenize.tokenize(io.BytesIO(l).readline):
290 if u.type in (tokenize.ENCODING, token.ENDMARKER):
289 if u.type in (tokenize.ENCODING, token.ENDMARKER):
291 continue
290 continue
292 yield u._replace(
291 yield u._replace(
293 start=(r, c + u.start[1]), end=(r, c + u.end[1]))
292 start=(r, c + u.start[1]), end=(r, c + u.end[1]))
294 continue
293 continue
295
294
296 # This looks like a function call.
295 # This looks like a function call.
297 if t.type == token.NAME and _isop(i + 1, '('):
296 if t.type == token.NAME and _isop(i + 1, '('):
298 fn = t.string
297 fn = t.string
299
298
300 # *attr() builtins don't accept byte strings to 2nd argument.
299 # *attr() builtins don't accept byte strings to 2nd argument.
301 if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
300 if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
302 not _isop(i - 1, '.')):
301 not _isop(i - 1, '.')):
303 arg1idx = _findargnofcall(1)
302 arg1idx = _findargnofcall(1)
304 if arg1idx is not None:
303 if arg1idx is not None:
305 _ensureunicode(arg1idx)
304 _ensureunicode(arg1idx)
306
305
307 # .encode() and .decode() on str/bytes/unicode don't accept
306 # .encode() and .decode() on str/bytes/unicode don't accept
308 # byte strings on Python 3.
307 # byte strings on Python 3.
309 elif fn in ('encode', 'decode') and _isop(i - 1, '.'):
308 elif fn in ('encode', 'decode') and _isop(i - 1, '.'):
310 for argn in range(2):
309 for argn in range(2):
311 argidx = _findargnofcall(argn)
310 argidx = _findargnofcall(argn)
312 if argidx is not None:
311 if argidx is not None:
313 _ensureunicode(argidx)
312 _ensureunicode(argidx)
314
313
315 # It changes iteritems/values to items/values as they are not
314 # It changes iteritems/values to items/values as they are not
316 # present in Python 3 world.
315 # present in Python 3 world.
317 elif fn in ('iteritems', 'itervalues'):
316 elif fn in ('iteritems', 'itervalues'):
318 yield t._replace(string=fn[4:])
317 yield t._replace(string=fn[4:])
319 continue
318 continue
320
319
321 # Emit unmodified token.
320 # Emit unmodified token.
322 yield t
321 yield t
323
322
324 # Header to add to bytecode files. This MUST be changed when
323 # Header to add to bytecode files. This MUST be changed when
325 # ``replacetoken`` or any mechanism that changes semantics of module
324 # ``replacetoken`` or any mechanism that changes semantics of module
326 # loading is changed. Otherwise cached bytecode may get loaded without
325 # loading is changed. Otherwise cached bytecode may get loaded without
327 # the new transformation mechanisms applied.
326 # the new transformation mechanisms applied.
328 BYTECODEHEADER = b'HG\x00\x0a'
327 BYTECODEHEADER = b'HG\x00\x0a'
329
328
330 class hgloader(importlib.machinery.SourceFileLoader):
329 class hgloader(importlib.machinery.SourceFileLoader):
331 """Custom module loader that transforms source code.
330 """Custom module loader that transforms source code.
332
331
333 When the source code is converted to a code object, we transform
332 When the source code is converted to a code object, we transform
334 certain patterns to be Python 3 compatible. This allows us to write code
333 certain patterns to be Python 3 compatible. This allows us to write code
335 that is natively Python 2 and compatible with Python 3 without
334 that is natively Python 2 and compatible with Python 3 without
336 making the code excessively ugly.
335 making the code excessively ugly.
337
336
338 We do this by transforming the token stream between parse and compile.
337 We do this by transforming the token stream between parse and compile.
339
338
340 Implementing transformations invalidates caching assumptions made
339 Implementing transformations invalidates caching assumptions made
341 by the built-in importer. The built-in importer stores a header on
340 by the built-in importer. The built-in importer stores a header on
342 saved bytecode files indicating the Python/bytecode version. If the
341 saved bytecode files indicating the Python/bytecode version. If the
343 version changes, the cached bytecode is ignored. The Mercurial
342 version changes, the cached bytecode is ignored. The Mercurial
344 transformations could change at any time. This means we need to check
343 transformations could change at any time. This means we need to check
345 that cached bytecode was generated with the current transformation
344 that cached bytecode was generated with the current transformation
346 code or there could be a mismatch between cached bytecode and what
345 code or there could be a mismatch between cached bytecode and what
347 would be generated from this class.
346 would be generated from this class.
348
347
349 We supplement the bytecode caching layer by wrapping ``get_data``
348 We supplement the bytecode caching layer by wrapping ``get_data``
350 and ``set_data``. These functions are called when the
349 and ``set_data``. These functions are called when the
351 ``SourceFileLoader`` retrieves and saves bytecode cache files,
350 ``SourceFileLoader`` retrieves and saves bytecode cache files,
352 respectively. We simply add an additional header on the file. As
351 respectively. We simply add an additional header on the file. As
353 long as the version in this file is changed when semantics change,
352 long as the version in this file is changed when semantics change,
354 cached bytecode should be invalidated when transformations change.
353 cached bytecode should be invalidated when transformations change.
355
354
356 The added header has the form ``HG<VERSION>``. That is a literal
355 The added header has the form ``HG<VERSION>``. That is a literal
357 ``HG`` with 2 binary bytes indicating the transformation version.
356 ``HG`` with 2 binary bytes indicating the transformation version.
358 """
357 """
359 def get_data(self, path):
358 def get_data(self, path):
360 data = super(hgloader, self).get_data(path)
359 data = super(hgloader, self).get_data(path)
361
360
362 if not path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
361 if not path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
363 return data
362 return data
364
363
365 # There should be a header indicating the Mercurial transformation
364 # There should be a header indicating the Mercurial transformation
366 # version. If it doesn't exist or doesn't match the current version,
365 # version. If it doesn't exist or doesn't match the current version,
367 # we raise an OSError because that is what
366 # we raise an OSError because that is what
368 # ``SourceFileLoader.get_code()`` expects when loading bytecode
367 # ``SourceFileLoader.get_code()`` expects when loading bytecode
369 # paths to indicate the cached file is "bad."
368 # paths to indicate the cached file is "bad."
370 if data[0:2] != b'HG':
369 if data[0:2] != b'HG':
371 raise OSError('no hg header')
370 raise OSError('no hg header')
372 if data[0:4] != BYTECODEHEADER:
371 if data[0:4] != BYTECODEHEADER:
373 raise OSError('hg header version mismatch')
372 raise OSError('hg header version mismatch')
374
373
375 return data[4:]
374 return data[4:]
376
375
377 def set_data(self, path, data, *args, **kwargs):
376 def set_data(self, path, data, *args, **kwargs):
378 if path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
377 if path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
379 data = BYTECODEHEADER + data
378 data = BYTECODEHEADER + data
380
379
381 return super(hgloader, self).set_data(path, data, *args, **kwargs)
380 return super(hgloader, self).set_data(path, data, *args, **kwargs)
382
381
383 def source_to_code(self, data, path):
382 def source_to_code(self, data, path):
384 """Perform token transformation before compilation."""
383 """Perform token transformation before compilation."""
385 buf = io.BytesIO(data)
384 buf = io.BytesIO(data)
386 tokens = tokenize.tokenize(buf.readline)
385 tokens = tokenize.tokenize(buf.readline)
387 data = tokenize.untokenize(replacetokens(list(tokens), self.name))
386 data = tokenize.untokenize(replacetokens(list(tokens), self.name))
388 # Python's built-in importer strips frames from exceptions raised
387 # Python's built-in importer strips frames from exceptions raised
389 # for this code. Unfortunately, that mechanism isn't extensible
388 # for this code. Unfortunately, that mechanism isn't extensible
390 # and our frame will be blamed for the import failure. There
389 # and our frame will be blamed for the import failure. There
391 # are extremely hacky ways to do frame stripping. We haven't
390 # are extremely hacky ways to do frame stripping. We haven't
392 # implemented them because they are very ugly.
391 # implemented them because they are very ugly.
393 return super(hgloader, self).source_to_code(data, path)
392 return super(hgloader, self).source_to_code(data, path)
394
393
395 # We automagically register our custom importer as a side-effect of loading.
394 # We automagically register our custom importer as a side-effect of loading.
396 # This is necessary to ensure that any entry points are able to import
395 # This is necessary to ensure that any entry points are able to import
397 # mercurial.* modules without having to perform this registration themselves.
396 # mercurial.* modules without having to perform this registration themselves.
398 if sys.version_info[0] >= 3:
397 if sys.version_info[0] >= 3:
399 _importercls = hgpathentryfinder
398 _importercls = hgpathentryfinder
400 else:
399 else:
401 _importercls = hgimporter
400 _importercls = hgimporter
402 if not any(isinstance(x, _importercls) for x in sys.meta_path):
401 if not any(isinstance(x, _importercls) for x in sys.meta_path):
403 # meta_path is used before any implicit finders and before sys.path.
402 # meta_path is used before any implicit finders and before sys.path.
404 sys.meta_path.insert(0, _importercls())
403 sys.meta_path.insert(0, _importercls())
1 NO CONTENT: file renamed from mercurial/osutil.c to mercurial/cext/osutil.c
NO CONTENT: file renamed from mercurial/osutil.c to mercurial/cext/osutil.c
@@ -1,2204 +1,2206 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 commands,
35 commands,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 filemerge,
43 filemerge,
44 fileset,
44 fileset,
45 formatter,
45 formatter,
46 hg,
46 hg,
47 localrepo,
47 localrepo,
48 lock as lockmod,
48 lock as lockmod,
49 merge as mergemod,
49 merge as mergemod,
50 obsolete,
50 obsolete,
51 policy,
51 policy,
52 pvec,
52 pvec,
53 pycompat,
53 pycompat,
54 registrar,
54 registrar,
55 repair,
55 repair,
56 revlog,
56 revlog,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 setdiscovery,
60 setdiscovery,
61 simplemerge,
61 simplemerge,
62 smartset,
62 smartset,
63 sslutil,
63 sslutil,
64 streamclone,
64 streamclone,
65 templater,
65 templater,
66 treediscovery,
66 treediscovery,
67 upgrade,
67 upgrade,
68 util,
68 util,
69 vfs as vfsmod,
69 vfs as vfsmod,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 # We reuse the command table from commands because it is easier than
74 # We reuse the command table from commands because it is easier than
75 # teaching dispatch about multiple tables.
75 # teaching dispatch about multiple tables.
76 command = registrar.command(commands.table)
76 command = registrar.command(commands.table)
77
77
78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
79 def debugancestor(ui, repo, *args):
79 def debugancestor(ui, repo, *args):
80 """find the ancestor revision of two revisions in a given index"""
80 """find the ancestor revision of two revisions in a given index"""
81 if len(args) == 3:
81 if len(args) == 3:
82 index, rev1, rev2 = args
82 index, rev1, rev2 = args
83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
84 lookup = r.lookup
84 lookup = r.lookup
85 elif len(args) == 2:
85 elif len(args) == 2:
86 if not repo:
86 if not repo:
87 raise error.Abort(_('there is no Mercurial repository here '
87 raise error.Abort(_('there is no Mercurial repository here '
88 '(.hg not found)'))
88 '(.hg not found)'))
89 rev1, rev2 = args
89 rev1, rev2 = args
90 r = repo.changelog
90 r = repo.changelog
91 lookup = repo.lookup
91 lookup = repo.lookup
92 else:
92 else:
93 raise error.Abort(_('either two or three arguments required'))
93 raise error.Abort(_('either two or three arguments required'))
94 a = r.ancestor(lookup(rev1), lookup(rev2))
94 a = r.ancestor(lookup(rev1), lookup(rev2))
95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
96
96
97 @command('debugapplystreamclonebundle', [], 'FILE')
97 @command('debugapplystreamclonebundle', [], 'FILE')
98 def debugapplystreamclonebundle(ui, repo, fname):
98 def debugapplystreamclonebundle(ui, repo, fname):
99 """apply a stream clone bundle file"""
99 """apply a stream clone bundle file"""
100 f = hg.openpath(ui, fname)
100 f = hg.openpath(ui, fname)
101 gen = exchange.readbundle(ui, f, fname)
101 gen = exchange.readbundle(ui, f, fname)
102 gen.apply(repo)
102 gen.apply(repo)
103
103
104 @command('debugbuilddag',
104 @command('debugbuilddag',
105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
107 ('n', 'new-file', None, _('add new file at each rev'))],
107 ('n', 'new-file', None, _('add new file at each rev'))],
108 _('[OPTION]... [TEXT]'))
108 _('[OPTION]... [TEXT]'))
109 def debugbuilddag(ui, repo, text=None,
109 def debugbuilddag(ui, repo, text=None,
110 mergeable_file=False,
110 mergeable_file=False,
111 overwritten_file=False,
111 overwritten_file=False,
112 new_file=False):
112 new_file=False):
113 """builds a repo with a given DAG from scratch in the current empty repo
113 """builds a repo with a given DAG from scratch in the current empty repo
114
114
115 The description of the DAG is read from stdin if not given on the
115 The description of the DAG is read from stdin if not given on the
116 command line.
116 command line.
117
117
118 Elements:
118 Elements:
119
119
120 - "+n" is a linear run of n nodes based on the current default parent
120 - "+n" is a linear run of n nodes based on the current default parent
121 - "." is a single node based on the current default parent
121 - "." is a single node based on the current default parent
122 - "$" resets the default parent to null (implied at the start);
122 - "$" resets the default parent to null (implied at the start);
123 otherwise the default parent is always the last node created
123 otherwise the default parent is always the last node created
124 - "<p" sets the default parent to the backref p
124 - "<p" sets the default parent to the backref p
125 - "*p" is a fork at parent p, which is a backref
125 - "*p" is a fork at parent p, which is a backref
126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
127 - "/p2" is a merge of the preceding node and p2
127 - "/p2" is a merge of the preceding node and p2
128 - ":tag" defines a local tag for the preceding node
128 - ":tag" defines a local tag for the preceding node
129 - "@branch" sets the named branch for subsequent nodes
129 - "@branch" sets the named branch for subsequent nodes
130 - "#...\\n" is a comment up to the end of the line
130 - "#...\\n" is a comment up to the end of the line
131
131
132 Whitespace between the above elements is ignored.
132 Whitespace between the above elements is ignored.
133
133
134 A backref is either
134 A backref is either
135
135
136 - a number n, which references the node curr-n, where curr is the current
136 - a number n, which references the node curr-n, where curr is the current
137 node, or
137 node, or
138 - the name of a local tag you placed earlier using ":tag", or
138 - the name of a local tag you placed earlier using ":tag", or
139 - empty to denote the default parent.
139 - empty to denote the default parent.
140
140
141 All string valued-elements are either strictly alphanumeric, or must
141 All string valued-elements are either strictly alphanumeric, or must
142 be enclosed in double quotes ("..."), with "\\" as escape character.
142 be enclosed in double quotes ("..."), with "\\" as escape character.
143 """
143 """
144
144
145 if text is None:
145 if text is None:
146 ui.status(_("reading DAG from stdin\n"))
146 ui.status(_("reading DAG from stdin\n"))
147 text = ui.fin.read()
147 text = ui.fin.read()
148
148
149 cl = repo.changelog
149 cl = repo.changelog
150 if len(cl) > 0:
150 if len(cl) > 0:
151 raise error.Abort(_('repository is not empty'))
151 raise error.Abort(_('repository is not empty'))
152
152
153 # determine number of revs in DAG
153 # determine number of revs in DAG
154 total = 0
154 total = 0
155 for type, data in dagparser.parsedag(text):
155 for type, data in dagparser.parsedag(text):
156 if type == 'n':
156 if type == 'n':
157 total += 1
157 total += 1
158
158
159 if mergeable_file:
159 if mergeable_file:
160 linesperrev = 2
160 linesperrev = 2
161 # make a file with k lines per rev
161 # make a file with k lines per rev
162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
163 initialmergedlines.append("")
163 initialmergedlines.append("")
164
164
165 tags = []
165 tags = []
166
166
167 wlock = lock = tr = None
167 wlock = lock = tr = None
168 try:
168 try:
169 wlock = repo.wlock()
169 wlock = repo.wlock()
170 lock = repo.lock()
170 lock = repo.lock()
171 tr = repo.transaction("builddag")
171 tr = repo.transaction("builddag")
172
172
173 at = -1
173 at = -1
174 atbranch = 'default'
174 atbranch = 'default'
175 nodeids = []
175 nodeids = []
176 id = 0
176 id = 0
177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
178 for type, data in dagparser.parsedag(text):
178 for type, data in dagparser.parsedag(text):
179 if type == 'n':
179 if type == 'n':
180 ui.note(('node %s\n' % str(data)))
180 ui.note(('node %s\n' % str(data)))
181 id, ps = data
181 id, ps = data
182
182
183 files = []
183 files = []
184 fctxs = {}
184 fctxs = {}
185
185
186 p2 = None
186 p2 = None
187 if mergeable_file:
187 if mergeable_file:
188 fn = "mf"
188 fn = "mf"
189 p1 = repo[ps[0]]
189 p1 = repo[ps[0]]
190 if len(ps) > 1:
190 if len(ps) > 1:
191 p2 = repo[ps[1]]
191 p2 = repo[ps[1]]
192 pa = p1.ancestor(p2)
192 pa = p1.ancestor(p2)
193 base, local, other = [x[fn].data() for x in (pa, p1,
193 base, local, other = [x[fn].data() for x in (pa, p1,
194 p2)]
194 p2)]
195 m3 = simplemerge.Merge3Text(base, local, other)
195 m3 = simplemerge.Merge3Text(base, local, other)
196 ml = [l.strip() for l in m3.merge_lines()]
196 ml = [l.strip() for l in m3.merge_lines()]
197 ml.append("")
197 ml.append("")
198 elif at > 0:
198 elif at > 0:
199 ml = p1[fn].data().split("\n")
199 ml = p1[fn].data().split("\n")
200 else:
200 else:
201 ml = initialmergedlines
201 ml = initialmergedlines
202 ml[id * linesperrev] += " r%i" % id
202 ml[id * linesperrev] += " r%i" % id
203 mergedtext = "\n".join(ml)
203 mergedtext = "\n".join(ml)
204 files.append(fn)
204 files.append(fn)
205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
206
206
207 if overwritten_file:
207 if overwritten_file:
208 fn = "of"
208 fn = "of"
209 files.append(fn)
209 files.append(fn)
210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
211
211
212 if new_file:
212 if new_file:
213 fn = "nf%i" % id
213 fn = "nf%i" % id
214 files.append(fn)
214 files.append(fn)
215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
216 if len(ps) > 1:
216 if len(ps) > 1:
217 if not p2:
217 if not p2:
218 p2 = repo[ps[1]]
218 p2 = repo[ps[1]]
219 for fn in p2:
219 for fn in p2:
220 if fn.startswith("nf"):
220 if fn.startswith("nf"):
221 files.append(fn)
221 files.append(fn)
222 fctxs[fn] = p2[fn]
222 fctxs[fn] = p2[fn]
223
223
224 def fctxfn(repo, cx, path):
224 def fctxfn(repo, cx, path):
225 return fctxs.get(path)
225 return fctxs.get(path)
226
226
227 if len(ps) == 0 or ps[0] < 0:
227 if len(ps) == 0 or ps[0] < 0:
228 pars = [None, None]
228 pars = [None, None]
229 elif len(ps) == 1:
229 elif len(ps) == 1:
230 pars = [nodeids[ps[0]], None]
230 pars = [nodeids[ps[0]], None]
231 else:
231 else:
232 pars = [nodeids[p] for p in ps]
232 pars = [nodeids[p] for p in ps]
233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
234 date=(id, 0),
234 date=(id, 0),
235 user="debugbuilddag",
235 user="debugbuilddag",
236 extra={'branch': atbranch})
236 extra={'branch': atbranch})
237 nodeid = repo.commitctx(cx)
237 nodeid = repo.commitctx(cx)
238 nodeids.append(nodeid)
238 nodeids.append(nodeid)
239 at = id
239 at = id
240 elif type == 'l':
240 elif type == 'l':
241 id, name = data
241 id, name = data
242 ui.note(('tag %s\n' % name))
242 ui.note(('tag %s\n' % name))
243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
244 elif type == 'a':
244 elif type == 'a':
245 ui.note(('branch %s\n' % data))
245 ui.note(('branch %s\n' % data))
246 atbranch = data
246 atbranch = data
247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
248 tr.close()
248 tr.close()
249
249
250 if tags:
250 if tags:
251 repo.vfs.write("localtags", "".join(tags))
251 repo.vfs.write("localtags", "".join(tags))
252 finally:
252 finally:
253 ui.progress(_('building'), None)
253 ui.progress(_('building'), None)
254 release(tr, lock, wlock)
254 release(tr, lock, wlock)
255
255
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of a changegroup to the ui.

    With ``all`` set, every delta chunk of the changelog, manifest and
    each filelog is printed (node, parents, cset, delta base, delta
    length); otherwise only the changelog node hashes are listed.
    ``indent`` prefixes every output line with that many spaces (used by
    the bundle2 dumper to nest part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print one header line, then every delta chunk of the
            # current group. gen.deltachunk(chain) returns {} at the end
            # of the group, which is the sentinel iter() stops on; each
            # chunk's delta base defaults to the previous node ("chain").
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        # A changegroup stream is: changelog group, manifest group, then
        # one group per filelog until an empty filelog header ({}).
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # Terse mode: only walk the changelog group and print node ids.
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node
293
293
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    # Refuse anything that is not an unbundled bundle2 stream.
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    # Walk every part; changegroup parts are additionally expanded via
    # the changegroup dumper, indented beneath the part line.
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type != 'changegroup':
            continue
        cgversion = part.params.get('version', '01')
        unbundler = changegroup.getunbundler(cgversion, part, 'UN')
        _debugchangegroup(ui, unbundler, all=all, indent=4, **opts)
305
305
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec only identifies the bundle; no content is dumped.
        if spec:
            bundlespec = exchange.getbundlespec(ui, fh)
            ui.write('%s\n' % bundlespec)
            return

        unbundled = exchange.readbundle(ui, fh, bundlepath)
        # Dispatch on format: bundle2 gets its own part-aware dumper.
        if isinstance(unbundled, bundle2.unbundle20):
            return _debugbundle2(ui, unbundled, all=all, **opts)
        _debugchangegroup(ui, unbundled, all=all, **opts)
323
323
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Cross-check every dirstate entry against the manifests of both
    # parents, warning (and counting) each inconsistency found.
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal / 'r'emoved entries must exist in the first parent.
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded files must not already be tracked by the first parent.
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged files must come from at least one parent.
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Bug fix: the original bound the message to a local named
        # 'error', shadowing the imported 'error' module, so the
        # following 'error.Abort' raised AttributeError on the string
        # instead of the intended Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
351
351
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # --style lists configured styles; the default lists raw colors.
    if not opts.get('style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
362
362
def _debugdisplaycolor(ui):
    """List every available color/effect name, each rendered in itself.

    Works on a private copy of ``ui`` so the real style table is not
    clobbered while we repoint every style at its own effect name.
    """
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, user-defined color./terminfo. config keys
        # are additional effects; strip the prefix for the style value.
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
380
380
def _debugdisplaystyle(ui):
    """Print each configured style followed by its rendered effects."""
    ui.write(_('available style:\n'))
    # Pad the effect column so all entries line up under the widest name.
    maxwidth = max(len(stylename) for stylename in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write('%s' % name, label=name)
        if effects:
            ui.write(': ')
            ui.write(' ' * max(0, maxwidth - len(name)))
            rendered = (ui.label(effect, effect)
                        for effect in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
392
392
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # Note: the 'cmd' argument is accepted but not consulted; every
    # table entry is always listed.
    for name, entry in sorted(commands.table.iteritems()):
        # Keep only the primary name, dropping aliases and the '^' marker.
        name = name.split('|')[0].strip('^')
        optnames = ', '.join(o[1] for o in entry[1])
        ui.write('%s: %s\n' % (name, optnames))
400
400
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # Complete option names: global options plus, when a command was
        # named, that command's own option table. Deprecated options are
        # hidden from completion.
        allopts = []
        opttables = [commands.globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
            opttables.append(entry[1])
        for table in opttables:
            for opt in table:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    allopts.append('-%s' % opt[0])
                allopts.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(allopts))
        return

    # Complete command names matching the given prefix; in verbose mode
    # show every alias of each match instead of just the primary name.
    names, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
    if ui.verbose:
        names = [' '.join(c[0]) for c in names.values()]
    ui.write("%s\n" % "\n".join(sorted(names)))
428
428
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # Generate the v1 stream bundle and spool its chunks to fname, then
    # report which repository requirements a consumer must support.
    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(reqs)))
440
440
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Revlog-file mode: open the given index relative to the cwd
        # (unaudited vfs) and emit its DAG; listed revs become labels.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, parents)) nodes and ('l', (rev, label))
            # labels in revision order for the dagparser serializer.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # Changelog mode: optionally annotate branch switches ('a'
        # events) and attach tag names as labels.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # changelog entry field 5 is the extra dict; emit an
                    # annotation whenever the branch changes.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
503
503
@command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the positional argument is the revision itself
    # and no file may be given; otherwise both FILE and REV are needed.
    if any(opts.get(o) for o in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit stored data without flag processing.
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
518
518
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # Parse into the internal (unixtime, tzoffset) pair, optionally
    # allowing the extended set of input formats.
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    # With a RANGE, also report whether the parsed date falls inside it.
    if range:
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
534
534
@command('debugdeltachain',
    commands.debugrevlogopts + commands.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # Classify the delta of one revision and measure its chain.
        # Index tuple fields used: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base may be either parent, the
            # previous rev, the rev itself (full snapshot) or arbitrary.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Classic revlogs only delta against the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    # Chains are numbered in order of first appearance of their base.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        # Bytes spanned on disk from the chain base through this rev;
        # anything beyond the chain's own size is unrelated data we
        # would have to seek across.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
635
635
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Dirstate entry tuple: (state char, mode, size, mtime).
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # NOTE(review): upstream pads 'unset'/'set' with trailing
            # spaces to align the time column; the padding width here
            # may have been collapsed in transit — verify against the
            # strftime format width below.
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        # S_IFLNK bit set means the entry records a symlink; otherwise
        # show the permission bits with the process umask applied.
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
666
666
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
    ] + commands.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round: either the legacy tree-walking protocol
        # (--old) or the modern set-based protocol, then report which heads
        # turned out to be common between the local and remote repos.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                # reduce the full common set to the heads of its connected
                # component, mirroring what the new protocol reports
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in server log files; each line is
        # a ';'-separated record whose second field names the operation
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
732
732
@command('debugextensions', commands.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                # bundled or known-good extensions get no warning suffix
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
777
777
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # show the parsed fileset tree before evaluating it
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
790
790
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        # probe case sensitivity with a throwaway file; the check may fail
        # on read-only or otherwise restricted paths, hence the OSError guard
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
805
805
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # map the user-visible compression name onto the on-disk bundle type
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
839
839
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        includepat = getattr(ignore, 'includepat', None)
        if includepat is not None:
            ui.write("%s\n" % includepat)
        else:
            raise error.Abort(_("no ignore patterns found"))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check whether one of
                    # its parent directories is
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)
884
884
@command('debugindex', commands.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # with generaldelta the "base" column actually shows the delta parent
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
940
940
@command('debugindexdot', commands.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        # only emit the second-parent edge for real merges
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
954
954
@command('debuginstall', [] + commands.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from . import (
                base85,
                bdiff,
                mpatch,
            )
            # osutil moved under the cext package with the policy importer
            from .cext import (
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1121
1123
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1134
1136
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin shim: the real work lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1139
1141
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: the second operand used to re-test 'force_lock', so
    # '--force-wlock' on its own fell through to the report below
    # instead of returning right after freeing the lock.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Report the state of one lock file; return 1 if held, else 0."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock, so it was free: release it again and
            # fall through to the "free" report below.
            l.release()
        else:
            # Lock is held; describe the holder from lock file metadata.
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT: lock file vanished between the probe and the
                # lstat - treat as free. Anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1211
1213
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    # Render 'null' instead of the all-zero hash for readability.
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    # Pretty-print the raw records of one merge-state format version.
    # Record types handled below: 'L' local, 'O' other, 'm' merge driver,
    # 'F'/'D'/'C' per-file entries, 'f' per-file extras, 'l' labels;
    # anything else is dumped verbatim.
    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # payload is NUL-separated: driver name, then driver state
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # file record: NUL-separated fields; v1 has no "other node"
                # field and stores flags at index 7 instead.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: filename, then alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels: local, other, and optionally base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    # Known record types sort first, in 'order' sequence; everything else
    # sorts after them by payload.
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    # Prefer v2 records when both formats agree; fall back to v1 on
    # mismatch (showing v2 too under --verbose for comparison).
    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1310
1312
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # 'branches' is excluded from the generic loop and handled separately
    # below so that only open branches are offered (historical behavior).
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # With no arguments, the empty prefix matches every candidate.
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(c for c in candidates if c.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1330
1332
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + commands.commitopts2 + commands.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Parse a full-length hex node id into binary form.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            # bin() raises TypeError on non-hex input; the length check
            # above funnels short/long ids into the same error path.
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # --delete mode: remove the markers at the given store indices.
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record PRECURSOR as obsoleted by SUCCESSORS.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # BUG FIX: message previously read 'cannot used'.
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata)
                tr.close()
            except ValueError as exc:
                # presumably from parsedate/obsstore.create on malformed
                # input - surfaced as a user-level abort
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # When both --index and --rev are given we must enumerate ALL
            # markers to compute stable indices, but only display the ones
            # relevant to --rev.
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1441
1443
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    # Return (files, dirs) completion candidates for 'path', restricted
    # to dirstate entries whose state character is in 'acceptable'.
    def complete(path, acceptable):
        dirstate = repo.dirstate
        # Resolve the spec to an absolute path and make sure it lives
        # inside the repository root.
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # Make the spec root-relative, using '/' separators like the
        # dirstate does (conversion only needed where os.sep != '/').
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bound methods hoisted out of the dirstate scan loop.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, complete only up to the next separator.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Map -n/-a/-r options to dirstate state characters; with no option
    # given, accept all of normal/modified/added/removed ('nmar').
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1506
1508
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + commands.walkopts + commands.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # --tool wins over every other configuration source, so apply it as
    # a config override for the duration of the examination.
    cfgoverrides = {}
    if opts['tool']:
        cfgoverrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(cfgoverrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        revctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(revctx, pats, opts)
        changedelete = opts['changedelete']
        for path in revctx.walk(matcher):
            filectx = revctx[path]
            try:
                # Without --debug, swallow any warnings _picktool emits
                # while matching merge-patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                toolname, toolpath = filemerge._picktool(repo, ui, path,
                                                         filectx.isbinary(),
                                                         'l' in filectx.flags(),
                                                         changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, toolname))
1584
1586
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for k, v in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k), util.escapestr(v)))
        return
    # Update mode: KEY OLD NEW were given on the command line.
    key, old, new = keyinfo
    pushed = peer.pushkey(namespace, key, old, new)
    ui.status(str(pushed) + '\n')
    return not pushed
1605
1607
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec encodings of two revisions (debugging aid)"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Defensive default: the four relations above should be
        # exhaustive for pvecs, but previously 'rel' stayed unbound if
        # none matched, which would raise NameError at the write below.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1626
1628
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        if opts.get('minimal'):
            # See command doc for what minimal does.
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # files only the manifest knows about
            manifestonly = inmanifest - indirstate
            # dirstate-only files, excluding those scheduled for addition
            dsnotadded = set(f for f in indirstate - inmanifest
                             if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1664
1666
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the heavy lifting lives in the repair module.
    repair.rebuildfncache(ui, repo)
1669
1671
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # Resolve the revision to inspect and build a matcher for the
    # requested files, then report copy/rename metadata per file.
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for fname in ctx.walk(m):
        fctx = ctx[fname]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = m.rel(fname)
        if renamed:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
1686
1688
@command('debugrevlog', commands.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # Raw per-revision index dump, one line per revision.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start   end deltastart base   p1   p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # track the current set of heads as we walk forward
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # each entry is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold 'size' into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = segment[0]
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Guard every average against empty/degenerate revlogs: previously an
    # empty revlog (numrevs == 0) or a delta-only revlog (numfull == 0)
    # raised ZeroDivisionError (or ValueError from max()) here.
    if numrevs:
        datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull:
        fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / max(numrevs, 1)
    maxchainlen = max(chainlengths or [0])
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    full      : ') + fmt % pcfmt(numfull, numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    full      : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in string.ascii_letters:
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                              numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                              numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                              numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
1906
1908
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    # The parsing pipeline: applied in order, each entry transforms the
    # tree produced by the previous stage.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # drop the final 'optimized' stage so evaluation uses the
        # analyzed tree directly
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stage trees to print: always, or only when the tree changed
    # from the previously printed one
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping every intermediate tree so the
    # --verify-optimized branch below can compare stages
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized tree and diff the
        # resulting revision sequences; identical means the optimizer is
        # behavior-preserving for this expression
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-style diff of the two revision sequences
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # normal path: evaluate the final tree and print matching revisions
    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)
1999
2001
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions up front; a missing second revision
    # defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
2017
2019
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (path, source, pinned revision) of the
    # requested changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source   %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2028
2030
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # In debug mode print full 40-char hashes, otherwise short forms.
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            # one indented line per successors set; an empty (pruned)
            # set prints as a blank line
            if succsset:
                ui.write('    ' + ' '.join(node2str(n) for n in succsset))
            ui.write('\n')
2082
2084
2083 @command('debugtemplate',
2085 @command('debugtemplate',
2084 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2086 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2085 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2087 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2086 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2088 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2087 optionalrepo=True)
2089 optionalrepo=True)
2088 def debugtemplate(ui, repo, tmpl, **opts):
2090 def debugtemplate(ui, repo, tmpl, **opts):
2089 """parse and apply a template
2091 """parse and apply a template
2090
2092
2091 If -r/--rev is given, the template is processed as a log template and
2093 If -r/--rev is given, the template is processed as a log template and
2092 applied to the given changesets. Otherwise, it is processed as a generic
2094 applied to the given changesets. Otherwise, it is processed as a generic
2093 template.
2095 template.
2094
2096
2095 Use --verbose to print the parsed tree.
2097 Use --verbose to print the parsed tree.
2096 """
2098 """
2097 revs = None
2099 revs = None
2098 if opts['rev']:
2100 if opts['rev']:
2099 if repo is None:
2101 if repo is None:
2100 raise error.RepoError(_('there is no Mercurial repository here '
2102 raise error.RepoError(_('there is no Mercurial repository here '
2101 '(.hg not found)'))
2103 '(.hg not found)'))
2102 revs = scmutil.revrange(repo, opts['rev'])
2104 revs = scmutil.revrange(repo, opts['rev'])
2103
2105
2104 props = {}
2106 props = {}
2105 for d in opts['define']:
2107 for d in opts['define']:
2106 try:
2108 try:
2107 k, v = (e.strip() for e in d.split('=', 1))
2109 k, v = (e.strip() for e in d.split('=', 1))
2108 if not k or k == 'ui':
2110 if not k or k == 'ui':
2109 raise ValueError
2111 raise ValueError
2110 props[k] = v
2112 props[k] = v
2111 except ValueError:
2113 except ValueError:
2112 raise error.Abort(_('malformed keyword definition: %s') % d)
2114 raise error.Abort(_('malformed keyword definition: %s') % d)
2113
2115
2114 if ui.verbose:
2116 if ui.verbose:
2115 aliases = ui.configitems('templatealias')
2117 aliases = ui.configitems('templatealias')
2116 tree = templater.parse(tmpl)
2118 tree = templater.parse(tmpl)
2117 ui.note(templater.prettyformat(tree), '\n')
2119 ui.note(templater.prettyformat(tree), '\n')
2118 newtree = templater.expandaliases(tree, aliases)
2120 newtree = templater.expandaliases(tree, aliases)
2119 if newtree != tree:
2121 if newtree != tree:
2120 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2122 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2121
2123
2122 mapfile = None
2124 mapfile = None
2123 if revs is None:
2125 if revs is None:
2124 k = 'debugtemplate'
2126 k = 'debugtemplate'
2125 t = formatter.maketemplater(ui, k, tmpl)
2127 t = formatter.maketemplater(ui, k, tmpl)
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2128 ui.write(templater.stringify(t(k, ui=ui, **props)))
2127 else:
2129 else:
2128 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2130 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2129 mapfile, buffered=False)
2131 mapfile, buffered=False)
2130 for r in revs:
2132 for r in revs:
2131 displayer.show(repo[r], **props)
2133 displayer.show(repo[r], **props)
2132 displayer.close()
2134 displayer.close()
2133
2135
2134 @command('debugupdatecaches', [])
2136 @command('debugupdatecaches', [])
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2137 def debugupdatecaches(ui, repo, *pats, **opts):
2136 """warm all known caches in the repository"""
2138 """warm all known caches in the repository"""
2137 with repo.wlock():
2139 with repo.wlock():
2138 with repo.lock():
2140 with repo.lock():
2139 repo.updatecaches()
2141 repo.updatecaches()
2140
2142
2141 @command('debugupgraderepo', [
2143 @command('debugupgraderepo', [
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2144 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2143 ('', 'run', False, _('performs an upgrade')),
2145 ('', 'run', False, _('performs an upgrade')),
2144 ])
2146 ])
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2147 def debugupgraderepo(ui, repo, run=False, optimize=None):
2146 """upgrade a repository to use different features
2148 """upgrade a repository to use different features
2147
2149
2148 If no arguments are specified, the repository is evaluated for upgrade
2150 If no arguments are specified, the repository is evaluated for upgrade
2149 and a list of problems and potential optimizations is printed.
2151 and a list of problems and potential optimizations is printed.
2150
2152
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2153 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2152 can be influenced via additional arguments. More details will be provided
2154 can be influenced via additional arguments. More details will be provided
2153 by the command output when run without ``--run``.
2155 by the command output when run without ``--run``.
2154
2156
2155 During the upgrade, the repository will be locked and no writes will be
2157 During the upgrade, the repository will be locked and no writes will be
2156 allowed.
2158 allowed.
2157
2159
2158 At the end of the upgrade, the repository may not be readable while new
2160 At the end of the upgrade, the repository may not be readable while new
2159 repository data is swapped in. This window will be as long as it takes to
2161 repository data is swapped in. This window will be as long as it takes to
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2162 rename some directories inside the ``.hg`` directory. On most machines, this
2161 should complete almost instantaneously and the chances of a consumer being
2163 should complete almost instantaneously and the chances of a consumer being
2162 unable to access the repository should be low.
2164 unable to access the repository should be low.
2163 """
2165 """
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2166 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2165
2167
2166 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2168 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2167 inferrepo=True)
2169 inferrepo=True)
2168 def debugwalk(ui, repo, *pats, **opts):
2170 def debugwalk(ui, repo, *pats, **opts):
2169 """show how files match on given patterns"""
2171 """show how files match on given patterns"""
2170 m = scmutil.match(repo[None], pats, opts)
2172 m = scmutil.match(repo[None], pats, opts)
2171 items = list(repo[None].walk(m))
2173 items = list(repo[None].walk(m))
2172 if not items:
2174 if not items:
2173 return
2175 return
2174 f = lambda fn: fn
2176 f = lambda fn: fn
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2177 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 f = lambda fn: util.normpath(fn)
2178 f = lambda fn: util.normpath(fn)
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2179 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 max([len(abs) for abs in items]),
2180 max([len(abs) for abs in items]),
2179 max([len(m.rel(abs)) for abs in items]))
2181 max([len(m.rel(abs)) for abs in items]))
2180 for abs in items:
2182 for abs in items:
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2183 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 ui.write("%s\n" % line.rstrip())
2184 ui.write("%s\n" % line.rstrip())
2183
2185
2184 @command('debugwireargs',
2186 @command('debugwireargs',
2185 [('', 'three', '', 'three'),
2187 [('', 'three', '', 'three'),
2186 ('', 'four', '', 'four'),
2188 ('', 'four', '', 'four'),
2187 ('', 'five', '', 'five'),
2189 ('', 'five', '', 'five'),
2188 ] + commands.remoteopts,
2190 ] + commands.remoteopts,
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2191 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 norepo=True)
2192 norepo=True)
2191 def debugwireargs(ui, repopath, *vals, **opts):
2193 def debugwireargs(ui, repopath, *vals, **opts):
2192 repo = hg.peer(ui, opts, repopath)
2194 repo = hg.peer(ui, opts, repopath)
2193 for opt in commands.remoteopts:
2195 for opt in commands.remoteopts:
2194 del opts[opt[1]]
2196 del opts[opt[1]]
2195 args = {}
2197 args = {}
2196 for k, v in opts.iteritems():
2198 for k, v in opts.iteritems():
2197 if v:
2199 if v:
2198 args[k] = v
2200 args[k] = v
2199 # run twice to check that we don't mess up the stream for the next command
2201 # run twice to check that we don't mess up the stream for the next command
2200 res1 = repo.debugwireargs(*vals, **args)
2202 res1 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2203 res2 = repo.debugwireargs(*vals, **args)
2202 ui.write("%s\n" % res1)
2204 ui.write("%s\n" % res1)
2203 if res1 != res2:
2205 if res1 != res2:
2204 ui.warn("%s\n" % res2)
2206 ui.warn("%s\n" % res2)
@@ -1,365 +1,365 b''
1 # osutil.py - pure Python version of osutil.c
1 # osutil.py - pure Python version of osutil.c
2 #
2 #
3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import ctypes
10 import ctypes
11 import ctypes.util
11 import ctypes.util
12 import os
12 import os
13 import socket
13 import socket
14 import stat as statmod
14 import stat as statmod
15
15
16 from . import (
16 from .. import (
17 policy,
17 policy,
18 pycompat,
18 pycompat,
19 )
19 )
20
20
21 modulepolicy = policy.policy
21 modulepolicy = policy.policy
22 policynocffi = policy.policynocffi
22 policynocffi = policy.policynocffi
23
23
24 def _mode_to_kind(mode):
24 def _mode_to_kind(mode):
25 if statmod.S_ISREG(mode):
25 if statmod.S_ISREG(mode):
26 return statmod.S_IFREG
26 return statmod.S_IFREG
27 if statmod.S_ISDIR(mode):
27 if statmod.S_ISDIR(mode):
28 return statmod.S_IFDIR
28 return statmod.S_IFDIR
29 if statmod.S_ISLNK(mode):
29 if statmod.S_ISLNK(mode):
30 return statmod.S_IFLNK
30 return statmod.S_IFLNK
31 if statmod.S_ISBLK(mode):
31 if statmod.S_ISBLK(mode):
32 return statmod.S_IFBLK
32 return statmod.S_IFBLK
33 if statmod.S_ISCHR(mode):
33 if statmod.S_ISCHR(mode):
34 return statmod.S_IFCHR
34 return statmod.S_IFCHR
35 if statmod.S_ISFIFO(mode):
35 if statmod.S_ISFIFO(mode):
36 return statmod.S_IFIFO
36 return statmod.S_IFIFO
37 if statmod.S_ISSOCK(mode):
37 if statmod.S_ISSOCK(mode):
38 return statmod.S_IFSOCK
38 return statmod.S_IFSOCK
39 return mode
39 return mode
40
40
41 def listdirpure(path, stat=False, skip=None):
41 def listdirpure(path, stat=False, skip=None):
42 '''listdir(path, stat=False) -> list_of_tuples
42 '''listdir(path, stat=False) -> list_of_tuples
43
43
44 Return a sorted list containing information about the entries
44 Return a sorted list containing information about the entries
45 in the directory.
45 in the directory.
46
46
47 If stat is True, each element is a 3-tuple:
47 If stat is True, each element is a 3-tuple:
48
48
49 (name, type, stat object)
49 (name, type, stat object)
50
50
51 Otherwise, each element is a 2-tuple:
51 Otherwise, each element is a 2-tuple:
52
52
53 (name, type)
53 (name, type)
54 '''
54 '''
55 result = []
55 result = []
56 prefix = path
56 prefix = path
57 if not prefix.endswith(pycompat.ossep):
57 if not prefix.endswith(pycompat.ossep):
58 prefix += pycompat.ossep
58 prefix += pycompat.ossep
59 names = os.listdir(path)
59 names = os.listdir(path)
60 names.sort()
60 names.sort()
61 for fn in names:
61 for fn in names:
62 st = os.lstat(prefix + fn)
62 st = os.lstat(prefix + fn)
63 if fn == skip and statmod.S_ISDIR(st.st_mode):
63 if fn == skip and statmod.S_ISDIR(st.st_mode):
64 return []
64 return []
65 if stat:
65 if stat:
66 result.append((fn, _mode_to_kind(st.st_mode), st))
66 result.append((fn, _mode_to_kind(st.st_mode), st))
67 else:
67 else:
68 result.append((fn, _mode_to_kind(st.st_mode)))
68 result.append((fn, _mode_to_kind(st.st_mode)))
69 return result
69 return result
70
70
71 ffi = None
71 ffi = None
72 if modulepolicy not in policynocffi and pycompat.sysplatform == 'darwin':
72 if modulepolicy not in policynocffi and pycompat.sysplatform == 'darwin':
73 try:
73 try:
74 from _osutil_cffi import ffi, lib
74 from _osutil_cffi import ffi, lib
75 except ImportError:
75 except ImportError:
76 if modulepolicy == 'cffi': # strict cffi import
76 if modulepolicy == 'cffi': # strict cffi import
77 raise
77 raise
78
78
79 if pycompat.sysplatform == 'darwin' and ffi is not None:
79 if pycompat.sysplatform == 'darwin' and ffi is not None:
80 listdir_batch_size = 4096
80 listdir_batch_size = 4096
81 # tweakable number, only affects performance, which chunks
81 # tweakable number, only affects performance, which chunks
82 # of bytes do we get back from getattrlistbulk
82 # of bytes do we get back from getattrlistbulk
83
83
84 attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
84 attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
85
85
86 attrkinds[lib.VREG] = statmod.S_IFREG
86 attrkinds[lib.VREG] = statmod.S_IFREG
87 attrkinds[lib.VDIR] = statmod.S_IFDIR
87 attrkinds[lib.VDIR] = statmod.S_IFDIR
88 attrkinds[lib.VLNK] = statmod.S_IFLNK
88 attrkinds[lib.VLNK] = statmod.S_IFLNK
89 attrkinds[lib.VBLK] = statmod.S_IFBLK
89 attrkinds[lib.VBLK] = statmod.S_IFBLK
90 attrkinds[lib.VCHR] = statmod.S_IFCHR
90 attrkinds[lib.VCHR] = statmod.S_IFCHR
91 attrkinds[lib.VFIFO] = statmod.S_IFIFO
91 attrkinds[lib.VFIFO] = statmod.S_IFIFO
92 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
92 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
93
93
94 class stat_res(object):
94 class stat_res(object):
95 def __init__(self, st_mode, st_mtime, st_size):
95 def __init__(self, st_mode, st_mtime, st_size):
96 self.st_mode = st_mode
96 self.st_mode = st_mode
97 self.st_mtime = st_mtime
97 self.st_mtime = st_mtime
98 self.st_size = st_size
98 self.st_size = st_size
99
99
100 tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
100 tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
101 buf = ffi.new("char[]", listdir_batch_size)
101 buf = ffi.new("char[]", listdir_batch_size)
102
102
103 def listdirinternal(dfd, req, stat, skip):
103 def listdirinternal(dfd, req, stat, skip):
104 ret = []
104 ret = []
105 while True:
105 while True:
106 r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
106 r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
107 if r == 0:
107 if r == 0:
108 break
108 break
109 if r == -1:
109 if r == -1:
110 raise OSError(ffi.errno, os.strerror(ffi.errno))
110 raise OSError(ffi.errno, os.strerror(ffi.errno))
111 cur = ffi.cast("val_attrs_t*", buf)
111 cur = ffi.cast("val_attrs_t*", buf)
112 for i in range(r):
112 for i in range(r):
113 lgt = cur.length
113 lgt = cur.length
114 assert lgt == ffi.cast('uint32_t*', cur)[0]
114 assert lgt == ffi.cast('uint32_t*', cur)[0]
115 ofs = cur.name_info.attr_dataoffset
115 ofs = cur.name_info.attr_dataoffset
116 str_lgt = cur.name_info.attr_length
116 str_lgt = cur.name_info.attr_length
117 base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
117 base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
118 name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
118 name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
119 str_lgt - 1))
119 str_lgt - 1))
120 tp = attrkinds[cur.obj_type]
120 tp = attrkinds[cur.obj_type]
121 if name == "." or name == "..":
121 if name == "." or name == "..":
122 continue
122 continue
123 if skip == name and tp == statmod.S_ISDIR:
123 if skip == name and tp == statmod.S_ISDIR:
124 return []
124 return []
125 if stat:
125 if stat:
126 mtime = cur.mtime.tv_sec
126 mtime = cur.mtime.tv_sec
127 mode = (cur.accessmask & ~lib.S_IFMT)| tp
127 mode = (cur.accessmask & ~lib.S_IFMT)| tp
128 ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
128 ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
129 st_size=cur.datalength)))
129 st_size=cur.datalength)))
130 else:
130 else:
131 ret.append((name, tp))
131 ret.append((name, tp))
132 cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
132 cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
133 + lgt)
133 + lgt)
134 return ret
134 return ret
135
135
136 def listdir(path, stat=False, skip=None):
136 def listdir(path, stat=False, skip=None):
137 req = ffi.new("struct attrlist*")
137 req = ffi.new("struct attrlist*")
138 req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
138 req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
139 req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
139 req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
140 lib.ATTR_CMN_NAME |
140 lib.ATTR_CMN_NAME |
141 lib.ATTR_CMN_OBJTYPE |
141 lib.ATTR_CMN_OBJTYPE |
142 lib.ATTR_CMN_ACCESSMASK |
142 lib.ATTR_CMN_ACCESSMASK |
143 lib.ATTR_CMN_MODTIME)
143 lib.ATTR_CMN_MODTIME)
144 req.fileattr = lib.ATTR_FILE_DATALENGTH
144 req.fileattr = lib.ATTR_FILE_DATALENGTH
145 dfd = lib.open(path, lib.O_RDONLY, 0)
145 dfd = lib.open(path, lib.O_RDONLY, 0)
146 if dfd == -1:
146 if dfd == -1:
147 raise OSError(ffi.errno, os.strerror(ffi.errno))
147 raise OSError(ffi.errno, os.strerror(ffi.errno))
148
148
149 try:
149 try:
150 ret = listdirinternal(dfd, req, stat, skip)
150 ret = listdirinternal(dfd, req, stat, skip)
151 finally:
151 finally:
152 try:
152 try:
153 lib.close(dfd)
153 lib.close(dfd)
154 except BaseException:
154 except BaseException:
155 pass # we ignore all the errors from closing, not
155 pass # we ignore all the errors from closing, not
156 # much we can do about that
156 # much we can do about that
157 return ret
157 return ret
158 else:
158 else:
159 listdir = listdirpure
159 listdir = listdirpure
160
160
161 if pycompat.osname != 'nt':
161 if pycompat.osname != 'nt':
162 posixfile = open
162 posixfile = open
163
163
164 _SCM_RIGHTS = 0x01
164 _SCM_RIGHTS = 0x01
165 _socklen_t = ctypes.c_uint
165 _socklen_t = ctypes.c_uint
166
166
167 if pycompat.sysplatform.startswith('linux'):
167 if pycompat.sysplatform.startswith('linux'):
168 # socket.h says "the type should be socklen_t but the definition of
168 # socket.h says "the type should be socklen_t but the definition of
169 # the kernel is incompatible with this."
169 # the kernel is incompatible with this."
170 _cmsg_len_t = ctypes.c_size_t
170 _cmsg_len_t = ctypes.c_size_t
171 _msg_controllen_t = ctypes.c_size_t
171 _msg_controllen_t = ctypes.c_size_t
172 _msg_iovlen_t = ctypes.c_size_t
172 _msg_iovlen_t = ctypes.c_size_t
173 else:
173 else:
174 _cmsg_len_t = _socklen_t
174 _cmsg_len_t = _socklen_t
175 _msg_controllen_t = _socklen_t
175 _msg_controllen_t = _socklen_t
176 _msg_iovlen_t = ctypes.c_int
176 _msg_iovlen_t = ctypes.c_int
177
177
178 class _iovec(ctypes.Structure):
178 class _iovec(ctypes.Structure):
179 _fields_ = [
179 _fields_ = [
180 (u'iov_base', ctypes.c_void_p),
180 (u'iov_base', ctypes.c_void_p),
181 (u'iov_len', ctypes.c_size_t),
181 (u'iov_len', ctypes.c_size_t),
182 ]
182 ]
183
183
184 class _msghdr(ctypes.Structure):
184 class _msghdr(ctypes.Structure):
185 _fields_ = [
185 _fields_ = [
186 (u'msg_name', ctypes.c_void_p),
186 (u'msg_name', ctypes.c_void_p),
187 (u'msg_namelen', _socklen_t),
187 (u'msg_namelen', _socklen_t),
188 (u'msg_iov', ctypes.POINTER(_iovec)),
188 (u'msg_iov', ctypes.POINTER(_iovec)),
189 (u'msg_iovlen', _msg_iovlen_t),
189 (u'msg_iovlen', _msg_iovlen_t),
190 (u'msg_control', ctypes.c_void_p),
190 (u'msg_control', ctypes.c_void_p),
191 (u'msg_controllen', _msg_controllen_t),
191 (u'msg_controllen', _msg_controllen_t),
192 (u'msg_flags', ctypes.c_int),
192 (u'msg_flags', ctypes.c_int),
193 ]
193 ]
194
194
195 class _cmsghdr(ctypes.Structure):
195 class _cmsghdr(ctypes.Structure):
196 _fields_ = [
196 _fields_ = [
197 (u'cmsg_len', _cmsg_len_t),
197 (u'cmsg_len', _cmsg_len_t),
198 (u'cmsg_level', ctypes.c_int),
198 (u'cmsg_level', ctypes.c_int),
199 (u'cmsg_type', ctypes.c_int),
199 (u'cmsg_type', ctypes.c_int),
200 (u'cmsg_data', ctypes.c_ubyte * 0),
200 (u'cmsg_data', ctypes.c_ubyte * 0),
201 ]
201 ]
202
202
203 _libc = ctypes.CDLL(ctypes.util.find_library(u'c'), use_errno=True)
203 _libc = ctypes.CDLL(ctypes.util.find_library(u'c'), use_errno=True)
204 _recvmsg = getattr(_libc, 'recvmsg', None)
204 _recvmsg = getattr(_libc, 'recvmsg', None)
205 if _recvmsg:
205 if _recvmsg:
206 _recvmsg.restype = getattr(ctypes, 'c_ssize_t', ctypes.c_long)
206 _recvmsg.restype = getattr(ctypes, 'c_ssize_t', ctypes.c_long)
207 _recvmsg.argtypes = (ctypes.c_int, ctypes.POINTER(_msghdr),
207 _recvmsg.argtypes = (ctypes.c_int, ctypes.POINTER(_msghdr),
208 ctypes.c_int)
208 ctypes.c_int)
209 else:
209 else:
210 # recvmsg isn't always provided by libc; such systems are unsupported
210 # recvmsg isn't always provided by libc; such systems are unsupported
211 def _recvmsg(sockfd, msg, flags):
211 def _recvmsg(sockfd, msg, flags):
212 raise NotImplementedError('unsupported platform')
212 raise NotImplementedError('unsupported platform')
213
213
214 def _CMSG_FIRSTHDR(msgh):
214 def _CMSG_FIRSTHDR(msgh):
215 if msgh.msg_controllen < ctypes.sizeof(_cmsghdr):
215 if msgh.msg_controllen < ctypes.sizeof(_cmsghdr):
216 return
216 return
217 cmsgptr = ctypes.cast(msgh.msg_control, ctypes.POINTER(_cmsghdr))
217 cmsgptr = ctypes.cast(msgh.msg_control, ctypes.POINTER(_cmsghdr))
218 return cmsgptr.contents
218 return cmsgptr.contents
219
219
220 # The pure version is less portable than the native version because the
220 # The pure version is less portable than the native version because the
221 # handling of socket ancillary data heavily depends on C preprocessor.
221 # handling of socket ancillary data heavily depends on C preprocessor.
222 # Also, some length fields are wrongly typed in Linux kernel.
222 # Also, some length fields are wrongly typed in Linux kernel.
223 def recvfds(sockfd):
223 def recvfds(sockfd):
224 """receive list of file descriptors via socket"""
224 """receive list of file descriptors via socket"""
225 dummy = (ctypes.c_ubyte * 1)()
225 dummy = (ctypes.c_ubyte * 1)()
226 iov = _iovec(ctypes.cast(dummy, ctypes.c_void_p), ctypes.sizeof(dummy))
226 iov = _iovec(ctypes.cast(dummy, ctypes.c_void_p), ctypes.sizeof(dummy))
227 cbuf = ctypes.create_string_buffer(256)
227 cbuf = ctypes.create_string_buffer(256)
228 msgh = _msghdr(None, 0,
228 msgh = _msghdr(None, 0,
229 ctypes.pointer(iov), 1,
229 ctypes.pointer(iov), 1,
230 ctypes.cast(cbuf, ctypes.c_void_p), ctypes.sizeof(cbuf),
230 ctypes.cast(cbuf, ctypes.c_void_p), ctypes.sizeof(cbuf),
231 0)
231 0)
232 r = _recvmsg(sockfd, ctypes.byref(msgh), 0)
232 r = _recvmsg(sockfd, ctypes.byref(msgh), 0)
233 if r < 0:
233 if r < 0:
234 e = ctypes.get_errno()
234 e = ctypes.get_errno()
235 raise OSError(e, os.strerror(e))
235 raise OSError(e, os.strerror(e))
236 # assumes that the first cmsg has fds because it isn't easy to write
236 # assumes that the first cmsg has fds because it isn't easy to write
237 # portable CMSG_NXTHDR() with ctypes.
237 # portable CMSG_NXTHDR() with ctypes.
238 cmsg = _CMSG_FIRSTHDR(msgh)
238 cmsg = _CMSG_FIRSTHDR(msgh)
239 if not cmsg:
239 if not cmsg:
240 return []
240 return []
241 if (cmsg.cmsg_level != socket.SOL_SOCKET or
241 if (cmsg.cmsg_level != socket.SOL_SOCKET or
242 cmsg.cmsg_type != _SCM_RIGHTS):
242 cmsg.cmsg_type != _SCM_RIGHTS):
243 return []
243 return []
244 rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int))
244 rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int))
245 rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) /
245 rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) /
246 ctypes.sizeof(ctypes.c_int))
246 ctypes.sizeof(ctypes.c_int))
247 return [rfds[i] for i in xrange(rfdscount)]
247 return [rfds[i] for i in xrange(rfdscount)]
248
248
249 else:
249 else:
250 import msvcrt
250 import msvcrt
251
251
252 _kernel32 = ctypes.windll.kernel32
252 _kernel32 = ctypes.windll.kernel32
253
253
254 _DWORD = ctypes.c_ulong
254 _DWORD = ctypes.c_ulong
255 _LPCSTR = _LPSTR = ctypes.c_char_p
255 _LPCSTR = _LPSTR = ctypes.c_char_p
256 _HANDLE = ctypes.c_void_p
256 _HANDLE = ctypes.c_void_p
257
257
258 _INVALID_HANDLE_VALUE = _HANDLE(-1).value
258 _INVALID_HANDLE_VALUE = _HANDLE(-1).value
259
259
260 # CreateFile
260 # CreateFile
261 _FILE_SHARE_READ = 0x00000001
261 _FILE_SHARE_READ = 0x00000001
262 _FILE_SHARE_WRITE = 0x00000002
262 _FILE_SHARE_WRITE = 0x00000002
263 _FILE_SHARE_DELETE = 0x00000004
263 _FILE_SHARE_DELETE = 0x00000004
264
264
265 _CREATE_ALWAYS = 2
265 _CREATE_ALWAYS = 2
266 _OPEN_EXISTING = 3
266 _OPEN_EXISTING = 3
267 _OPEN_ALWAYS = 4
267 _OPEN_ALWAYS = 4
268
268
269 _GENERIC_READ = 0x80000000
269 _GENERIC_READ = 0x80000000
270 _GENERIC_WRITE = 0x40000000
270 _GENERIC_WRITE = 0x40000000
271
271
272 _FILE_ATTRIBUTE_NORMAL = 0x80
272 _FILE_ATTRIBUTE_NORMAL = 0x80
273
273
274 # open_osfhandle flags
274 # open_osfhandle flags
275 _O_RDONLY = 0x0000
275 _O_RDONLY = 0x0000
276 _O_RDWR = 0x0002
276 _O_RDWR = 0x0002
277 _O_APPEND = 0x0008
277 _O_APPEND = 0x0008
278
278
279 _O_TEXT = 0x4000
279 _O_TEXT = 0x4000
280 _O_BINARY = 0x8000
280 _O_BINARY = 0x8000
281
281
282 # types of parameters of C functions used (required by pypy)
282 # types of parameters of C functions used (required by pypy)
283
283
284 _kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
284 _kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
285 _DWORD, _DWORD, _HANDLE]
285 _DWORD, _DWORD, _HANDLE]
286 _kernel32.CreateFileA.restype = _HANDLE
286 _kernel32.CreateFileA.restype = _HANDLE
287
287
288 def _raiseioerror(name):
288 def _raiseioerror(name):
289 err = ctypes.WinError()
289 err = ctypes.WinError()
290 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
290 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
291
291
292 class posixfile(object):
292 class posixfile(object):
293 '''a file object aiming for POSIX-like semantics
293 '''a file object aiming for POSIX-like semantics
294
294
295 CPython's open() returns a file that was opened *without* setting the
295 CPython's open() returns a file that was opened *without* setting the
296 _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
296 _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
297 This even happens if any hardlinked copy of the file is in open state.
297 This even happens if any hardlinked copy of the file is in open state.
298 We set _FILE_SHARE_DELETE here, so files opened with posixfile can be
298 We set _FILE_SHARE_DELETE here, so files opened with posixfile can be
299 renamed and deleted while they are held open.
299 renamed and deleted while they are held open.
300 Note that if a file opened with posixfile is unlinked, the file
300 Note that if a file opened with posixfile is unlinked, the file
301 remains but cannot be opened again or be recreated under the same name,
301 remains but cannot be opened again or be recreated under the same name,
302 until all reading processes have closed the file.'''
302 until all reading processes have closed the file.'''
303
303
304 def __init__(self, name, mode='r', bufsize=-1):
304 def __init__(self, name, mode='r', bufsize=-1):
305 if 'b' in mode:
305 if 'b' in mode:
306 flags = _O_BINARY
306 flags = _O_BINARY
307 else:
307 else:
308 flags = _O_TEXT
308 flags = _O_TEXT
309
309
310 m0 = mode[0]
310 m0 = mode[0]
311 if m0 == 'r' and '+' not in mode:
311 if m0 == 'r' and '+' not in mode:
312 flags |= _O_RDONLY
312 flags |= _O_RDONLY
313 access = _GENERIC_READ
313 access = _GENERIC_READ
314 else:
314 else:
315 # work around http://support.microsoft.com/kb/899149 and
315 # work around http://support.microsoft.com/kb/899149 and
316 # set _O_RDWR for 'w' and 'a', even if mode has no '+'
316 # set _O_RDWR for 'w' and 'a', even if mode has no '+'
317 flags |= _O_RDWR
317 flags |= _O_RDWR
318 access = _GENERIC_READ | _GENERIC_WRITE
318 access = _GENERIC_READ | _GENERIC_WRITE
319
319
320 if m0 == 'r':
320 if m0 == 'r':
321 creation = _OPEN_EXISTING
321 creation = _OPEN_EXISTING
322 elif m0 == 'w':
322 elif m0 == 'w':
323 creation = _CREATE_ALWAYS
323 creation = _CREATE_ALWAYS
324 elif m0 == 'a':
324 elif m0 == 'a':
325 creation = _OPEN_ALWAYS
325 creation = _OPEN_ALWAYS
326 flags |= _O_APPEND
326 flags |= _O_APPEND
327 else:
327 else:
328 raise ValueError("invalid mode: %s" % mode)
328 raise ValueError("invalid mode: %s" % mode)
329
329
330 fh = _kernel32.CreateFileA(name, access,
330 fh = _kernel32.CreateFileA(name, access,
331 _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
331 _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
332 None, creation, _FILE_ATTRIBUTE_NORMAL, None)
332 None, creation, _FILE_ATTRIBUTE_NORMAL, None)
333 if fh == _INVALID_HANDLE_VALUE:
333 if fh == _INVALID_HANDLE_VALUE:
334 _raiseioerror(name)
334 _raiseioerror(name)
335
335
336 fd = msvcrt.open_osfhandle(fh, flags)
336 fd = msvcrt.open_osfhandle(fh, flags)
337 if fd == -1:
337 if fd == -1:
338 _kernel32.CloseHandle(fh)
338 _kernel32.CloseHandle(fh)
339 _raiseioerror(name)
339 _raiseioerror(name)
340
340
341 f = os.fdopen(fd, pycompat.sysstr(mode), bufsize)
341 f = os.fdopen(fd, pycompat.sysstr(mode), bufsize)
342 # unfortunately, f.name is '<fdopen>' at this point -- so we store
342 # unfortunately, f.name is '<fdopen>' at this point -- so we store
343 # the name on this wrapper. We cannot just assign to f.name,
343 # the name on this wrapper. We cannot just assign to f.name,
344 # because that attribute is read-only.
344 # because that attribute is read-only.
345 object.__setattr__(self, r'name', name)
345 object.__setattr__(self, r'name', name)
346 object.__setattr__(self, r'_file', f)
346 object.__setattr__(self, r'_file', f)
347
347
348 def __iter__(self):
348 def __iter__(self):
349 return self._file
349 return self._file
350
350
351 def __getattr__(self, name):
351 def __getattr__(self, name):
352 return getattr(self._file, name)
352 return getattr(self._file, name)
353
353
354 def __setattr__(self, name, value):
354 def __setattr__(self, name, value):
355 '''mimics the read-only attributes of Python file objects
355 '''mimics the read-only attributes of Python file objects
356 by raising 'TypeError: readonly attribute' if someone tries:
356 by raising 'TypeError: readonly attribute' if someone tries:
357 f = posixfile('foo.txt')
357 f = posixfile('foo.txt')
358 f.name = 'bla' '''
358 f.name = 'bla' '''
359 return self._file.__setattr__(name, value)
359 return self._file.__setattr__(name, value)
360
360
361 def __enter__(self):
361 def __enter__(self):
362 return self._file.__enter__()
362 return self._file.__enter__()
363
363
364 def __exit__(self, exc_type, exc_value, exc_tb):
364 def __exit__(self, exc_type, exc_value, exc_tb):
365 return self._file.__exit__(exc_type, exc_value, exc_tb)
365 return self._file.__exit__(exc_type, exc_value, exc_tb)
@@ -1,3729 +1,3731 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import errno
23 import errno
24 import gc
24 import gc
25 import hashlib
25 import hashlib
26 import imp
26 import imp
27 import os
27 import os
28 import platform as pyplatform
28 import platform as pyplatform
29 import re as remod
29 import re as remod
30 import shutil
30 import shutil
31 import signal
31 import signal
32 import socket
32 import socket
33 import stat
33 import stat
34 import string
34 import string
35 import subprocess
35 import subprocess
36 import sys
36 import sys
37 import tempfile
37 import tempfile
38 import textwrap
38 import textwrap
39 import time
39 import time
40 import traceback
40 import traceback
41 import warnings
41 import warnings
42 import zlib
42 import zlib
43
43
44 from . import (
44 from . import (
45 base85,
45 base85,
46 encoding,
46 encoding,
47 error,
47 error,
48 i18n,
48 i18n,
49 osutil,
50 parsers,
49 parsers,
50 policy,
51 pycompat,
51 pycompat,
52 )
52 )
53
53
54 osutil = policy.importmod(r'osutil')
55
54 b85decode = base85.b85decode
56 b85decode = base85.b85decode
55 b85encode = base85.b85encode
57 b85encode = base85.b85encode
56
58
57 cookielib = pycompat.cookielib
59 cookielib = pycompat.cookielib
58 empty = pycompat.empty
60 empty = pycompat.empty
59 httplib = pycompat.httplib
61 httplib = pycompat.httplib
60 httpserver = pycompat.httpserver
62 httpserver = pycompat.httpserver
61 pickle = pycompat.pickle
63 pickle = pycompat.pickle
62 queue = pycompat.queue
64 queue = pycompat.queue
63 socketserver = pycompat.socketserver
65 socketserver = pycompat.socketserver
64 stderr = pycompat.stderr
66 stderr = pycompat.stderr
65 stdin = pycompat.stdin
67 stdin = pycompat.stdin
66 stdout = pycompat.stdout
68 stdout = pycompat.stdout
67 stringio = pycompat.stringio
69 stringio = pycompat.stringio
68 urlerr = pycompat.urlerr
70 urlerr = pycompat.urlerr
69 urlreq = pycompat.urlreq
71 urlreq = pycompat.urlreq
70 xmlrpclib = pycompat.xmlrpclib
72 xmlrpclib = pycompat.xmlrpclib
71
73
72 def isatty(fp):
74 def isatty(fp):
73 try:
75 try:
74 return fp.isatty()
76 return fp.isatty()
75 except AttributeError:
77 except AttributeError:
76 return False
78 return False
77
79
78 # glibc determines buffering on first write to stdout - if we replace a TTY
80 # glibc determines buffering on first write to stdout - if we replace a TTY
79 # destined stdout with a pipe destined stdout (e.g. pager), we want line
81 # destined stdout with a pipe destined stdout (e.g. pager), we want line
80 # buffering
82 # buffering
81 if isatty(stdout):
83 if isatty(stdout):
82 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
84 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
83
85
84 if pycompat.osname == 'nt':
86 if pycompat.osname == 'nt':
85 from . import windows as platform
87 from . import windows as platform
86 stdout = platform.winstdout(stdout)
88 stdout = platform.winstdout(stdout)
87 else:
89 else:
88 from . import posix as platform
90 from . import posix as platform
89
91
90 _ = i18n._
92 _ = i18n._
91
93
92 bindunixsocket = platform.bindunixsocket
94 bindunixsocket = platform.bindunixsocket
93 cachestat = platform.cachestat
95 cachestat = platform.cachestat
94 checkexec = platform.checkexec
96 checkexec = platform.checkexec
95 checklink = platform.checklink
97 checklink = platform.checklink
96 copymode = platform.copymode
98 copymode = platform.copymode
97 executablepath = platform.executablepath
99 executablepath = platform.executablepath
98 expandglobs = platform.expandglobs
100 expandglobs = platform.expandglobs
99 explainexit = platform.explainexit
101 explainexit = platform.explainexit
100 findexe = platform.findexe
102 findexe = platform.findexe
101 gethgcmd = platform.gethgcmd
103 gethgcmd = platform.gethgcmd
102 getuser = platform.getuser
104 getuser = platform.getuser
103 getpid = os.getpid
105 getpid = os.getpid
104 groupmembers = platform.groupmembers
106 groupmembers = platform.groupmembers
105 groupname = platform.groupname
107 groupname = platform.groupname
106 hidewindow = platform.hidewindow
108 hidewindow = platform.hidewindow
107 isexec = platform.isexec
109 isexec = platform.isexec
108 isowner = platform.isowner
110 isowner = platform.isowner
109 listdir = osutil.listdir
111 listdir = osutil.listdir
110 localpath = platform.localpath
112 localpath = platform.localpath
111 lookupreg = platform.lookupreg
113 lookupreg = platform.lookupreg
112 makedir = platform.makedir
114 makedir = platform.makedir
113 nlinks = platform.nlinks
115 nlinks = platform.nlinks
114 normpath = platform.normpath
116 normpath = platform.normpath
115 normcase = platform.normcase
117 normcase = platform.normcase
116 normcasespec = platform.normcasespec
118 normcasespec = platform.normcasespec
117 normcasefallback = platform.normcasefallback
119 normcasefallback = platform.normcasefallback
118 openhardlinks = platform.openhardlinks
120 openhardlinks = platform.openhardlinks
119 oslink = platform.oslink
121 oslink = platform.oslink
120 parsepatchoutput = platform.parsepatchoutput
122 parsepatchoutput = platform.parsepatchoutput
121 pconvert = platform.pconvert
123 pconvert = platform.pconvert
122 poll = platform.poll
124 poll = platform.poll
123 popen = platform.popen
125 popen = platform.popen
124 posixfile = platform.posixfile
126 posixfile = platform.posixfile
125 quotecommand = platform.quotecommand
127 quotecommand = platform.quotecommand
126 readpipe = platform.readpipe
128 readpipe = platform.readpipe
127 rename = platform.rename
129 rename = platform.rename
128 removedirs = platform.removedirs
130 removedirs = platform.removedirs
129 samedevice = platform.samedevice
131 samedevice = platform.samedevice
130 samefile = platform.samefile
132 samefile = platform.samefile
131 samestat = platform.samestat
133 samestat = platform.samestat
132 setbinary = platform.setbinary
134 setbinary = platform.setbinary
133 setflags = platform.setflags
135 setflags = platform.setflags
134 setsignalhandler = platform.setsignalhandler
136 setsignalhandler = platform.setsignalhandler
135 shellquote = platform.shellquote
137 shellquote = platform.shellquote
136 spawndetached = platform.spawndetached
138 spawndetached = platform.spawndetached
137 split = platform.split
139 split = platform.split
138 sshargs = platform.sshargs
140 sshargs = platform.sshargs
139 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
141 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
140 statisexec = platform.statisexec
142 statisexec = platform.statisexec
141 statislink = platform.statislink
143 statislink = platform.statislink
142 testpid = platform.testpid
144 testpid = platform.testpid
143 umask = platform.umask
145 umask = platform.umask
144 unlink = platform.unlink
146 unlink = platform.unlink
145 username = platform.username
147 username = platform.username
146
148
147 try:
149 try:
148 recvfds = osutil.recvfds
150 recvfds = osutil.recvfds
149 except AttributeError:
151 except AttributeError:
150 pass
152 pass
151 try:
153 try:
152 setprocname = osutil.setprocname
154 setprocname = osutil.setprocname
153 except AttributeError:
155 except AttributeError:
154 pass
156 pass
155
157
156 # Python compatibility
158 # Python compatibility
157
159
158 _notset = object()
160 _notset = object()
159
161
160 # disable Python's problematic floating point timestamps (issue4836)
162 # disable Python's problematic floating point timestamps (issue4836)
161 # (Python hypocritically says you shouldn't change this behavior in
163 # (Python hypocritically says you shouldn't change this behavior in
162 # libraries, and sure enough Mercurial is not a library.)
164 # libraries, and sure enough Mercurial is not a library.)
163 os.stat_float_times(False)
165 os.stat_float_times(False)
164
166
165 def safehasattr(thing, attr):
167 def safehasattr(thing, attr):
166 return getattr(thing, attr, _notset) is not _notset
168 return getattr(thing, attr, _notset) is not _notset
167
169
168 def bitsfrom(container):
170 def bitsfrom(container):
169 bits = 0
171 bits = 0
170 for bit in container:
172 for bit in container:
171 bits |= bit
173 bits |= bit
172 return bits
174 return bits
173
175
174 # python 2.6 still have deprecation warning enabled by default. We do not want
176 # python 2.6 still have deprecation warning enabled by default. We do not want
175 # to display anything to standard user so detect if we are running test and
177 # to display anything to standard user so detect if we are running test and
176 # only use python deprecation warning in this case.
178 # only use python deprecation warning in this case.
177 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
179 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
178 if _dowarn:
180 if _dowarn:
179 # explicitly unfilter our warning for python 2.7
181 # explicitly unfilter our warning for python 2.7
180 #
182 #
181 # The option of setting PYTHONWARNINGS in the test runner was investigated.
183 # The option of setting PYTHONWARNINGS in the test runner was investigated.
182 # However, module name set through PYTHONWARNINGS was exactly matched, so
184 # However, module name set through PYTHONWARNINGS was exactly matched, so
183 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
185 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
184 # makes the whole PYTHONWARNINGS thing useless for our usecase.
186 # makes the whole PYTHONWARNINGS thing useless for our usecase.
185 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
187 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
186 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
188 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
187 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
189 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
188
190
189 def nouideprecwarn(msg, version, stacklevel=1):
191 def nouideprecwarn(msg, version, stacklevel=1):
190 """Issue an python native deprecation warning
192 """Issue an python native deprecation warning
191
193
192 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
194 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
193 """
195 """
194 if _dowarn:
196 if _dowarn:
195 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
197 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
196 " update your code.)") % version
198 " update your code.)") % version
197 warnings.warn(msg, DeprecationWarning, stacklevel + 1)
199 warnings.warn(msg, DeprecationWarning, stacklevel + 1)
198
200
199 DIGESTS = {
201 DIGESTS = {
200 'md5': hashlib.md5,
202 'md5': hashlib.md5,
201 'sha1': hashlib.sha1,
203 'sha1': hashlib.sha1,
202 'sha512': hashlib.sha512,
204 'sha512': hashlib.sha512,
203 }
205 }
204 # List of digest types from strongest to weakest
206 # List of digest types from strongest to weakest
205 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
207 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
206
208
207 for k in DIGESTS_BY_STRENGTH:
209 for k in DIGESTS_BY_STRENGTH:
208 assert k in DIGESTS
210 assert k in DIGESTS
209
211
210 class digester(object):
212 class digester(object):
211 """helper to compute digests.
213 """helper to compute digests.
212
214
213 This helper can be used to compute one or more digests given their name.
215 This helper can be used to compute one or more digests given their name.
214
216
215 >>> d = digester(['md5', 'sha1'])
217 >>> d = digester(['md5', 'sha1'])
216 >>> d.update('foo')
218 >>> d.update('foo')
217 >>> [k for k in sorted(d)]
219 >>> [k for k in sorted(d)]
218 ['md5', 'sha1']
220 ['md5', 'sha1']
219 >>> d['md5']
221 >>> d['md5']
220 'acbd18db4cc2f85cedef654fccc4a4d8'
222 'acbd18db4cc2f85cedef654fccc4a4d8'
221 >>> d['sha1']
223 >>> d['sha1']
222 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
224 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
223 >>> digester.preferred(['md5', 'sha1'])
225 >>> digester.preferred(['md5', 'sha1'])
224 'sha1'
226 'sha1'
225 """
227 """
226
228
227 def __init__(self, digests, s=''):
229 def __init__(self, digests, s=''):
228 self._hashes = {}
230 self._hashes = {}
229 for k in digests:
231 for k in digests:
230 if k not in DIGESTS:
232 if k not in DIGESTS:
231 raise Abort(_('unknown digest type: %s') % k)
233 raise Abort(_('unknown digest type: %s') % k)
232 self._hashes[k] = DIGESTS[k]()
234 self._hashes[k] = DIGESTS[k]()
233 if s:
235 if s:
234 self.update(s)
236 self.update(s)
235
237
236 def update(self, data):
238 def update(self, data):
237 for h in self._hashes.values():
239 for h in self._hashes.values():
238 h.update(data)
240 h.update(data)
239
241
240 def __getitem__(self, key):
242 def __getitem__(self, key):
241 if key not in DIGESTS:
243 if key not in DIGESTS:
242 raise Abort(_('unknown digest type: %s') % k)
244 raise Abort(_('unknown digest type: %s') % k)
243 return self._hashes[key].hexdigest()
245 return self._hashes[key].hexdigest()
244
246
245 def __iter__(self):
247 def __iter__(self):
246 return iter(self._hashes)
248 return iter(self._hashes)
247
249
248 @staticmethod
250 @staticmethod
249 def preferred(supported):
251 def preferred(supported):
250 """returns the strongest digest type in both supported and DIGESTS."""
252 """returns the strongest digest type in both supported and DIGESTS."""
251
253
252 for k in DIGESTS_BY_STRENGTH:
254 for k in DIGESTS_BY_STRENGTH:
253 if k in supported:
255 if k in supported:
254 return k
256 return k
255 return None
257 return None
256
258
257 class digestchecker(object):
259 class digestchecker(object):
258 """file handle wrapper that additionally checks content against a given
260 """file handle wrapper that additionally checks content against a given
259 size and digests.
261 size and digests.
260
262
261 d = digestchecker(fh, size, {'md5': '...'})
263 d = digestchecker(fh, size, {'md5': '...'})
262
264
263 When multiple digests are given, all of them are validated.
265 When multiple digests are given, all of them are validated.
264 """
266 """
265
267
266 def __init__(self, fh, size, digests):
268 def __init__(self, fh, size, digests):
267 self._fh = fh
269 self._fh = fh
268 self._size = size
270 self._size = size
269 self._got = 0
271 self._got = 0
270 self._digests = dict(digests)
272 self._digests = dict(digests)
271 self._digester = digester(self._digests.keys())
273 self._digester = digester(self._digests.keys())
272
274
273 def read(self, length=-1):
275 def read(self, length=-1):
274 content = self._fh.read(length)
276 content = self._fh.read(length)
275 self._digester.update(content)
277 self._digester.update(content)
276 self._got += len(content)
278 self._got += len(content)
277 return content
279 return content
278
280
279 def validate(self):
281 def validate(self):
280 if self._size != self._got:
282 if self._size != self._got:
281 raise Abort(_('size mismatch: expected %d, got %d') %
283 raise Abort(_('size mismatch: expected %d, got %d') %
282 (self._size, self._got))
284 (self._size, self._got))
283 for k, v in self._digests.items():
285 for k, v in self._digests.items():
284 if v != self._digester[k]:
286 if v != self._digester[k]:
285 # i18n: first parameter is a digest name
287 # i18n: first parameter is a digest name
286 raise Abort(_('%s mismatch: expected %s, got %s') %
288 raise Abort(_('%s mismatch: expected %s, got %s') %
287 (k, v, self._digester[k]))
289 (k, v, self._digester[k]))
288
290
289 try:
291 try:
290 buffer = buffer
292 buffer = buffer
291 except NameError:
293 except NameError:
292 if not pycompat.ispy3:
294 if not pycompat.ispy3:
293 def buffer(sliceable, offset=0, length=None):
295 def buffer(sliceable, offset=0, length=None):
294 if length is not None:
296 if length is not None:
295 return sliceable[offset:offset + length]
297 return sliceable[offset:offset + length]
296 return sliceable[offset:]
298 return sliceable[offset:]
297 else:
299 else:
298 def buffer(sliceable, offset=0, length=None):
300 def buffer(sliceable, offset=0, length=None):
299 if length is not None:
301 if length is not None:
300 return memoryview(sliceable)[offset:offset + length]
302 return memoryview(sliceable)[offset:offset + length]
301 return memoryview(sliceable)[offset:]
303 return memoryview(sliceable)[offset:]
302
304
303 closefds = pycompat.osname == 'posix'
305 closefds = pycompat.osname == 'posix'
304
306
305 _chunksize = 4096
307 _chunksize = 4096
306
308
307 class bufferedinputpipe(object):
309 class bufferedinputpipe(object):
308 """a manually buffered input pipe
310 """a manually buffered input pipe
309
311
310 Python will not let us use buffered IO and lazy reading with 'polling' at
312 Python will not let us use buffered IO and lazy reading with 'polling' at
311 the same time. We cannot probe the buffer state and select will not detect
313 the same time. We cannot probe the buffer state and select will not detect
312 that data are ready to read if they are already buffered.
314 that data are ready to read if they are already buffered.
313
315
314 This class let us work around that by implementing its own buffering
316 This class let us work around that by implementing its own buffering
315 (allowing efficient readline) while offering a way to know if the buffer is
317 (allowing efficient readline) while offering a way to know if the buffer is
316 empty from the output (allowing collaboration of the buffer with polling).
318 empty from the output (allowing collaboration of the buffer with polling).
317
319
318 This class lives in the 'util' module because it makes use of the 'os'
320 This class lives in the 'util' module because it makes use of the 'os'
319 module from the python stdlib.
321 module from the python stdlib.
320 """
322 """
321
323
322 def __init__(self, input):
324 def __init__(self, input):
323 self._input = input
325 self._input = input
324 self._buffer = []
326 self._buffer = []
325 self._eof = False
327 self._eof = False
326 self._lenbuf = 0
328 self._lenbuf = 0
327
329
328 @property
330 @property
329 def hasbuffer(self):
331 def hasbuffer(self):
330 """True is any data is currently buffered
332 """True is any data is currently buffered
331
333
332 This will be used externally a pre-step for polling IO. If there is
334 This will be used externally a pre-step for polling IO. If there is
333 already data then no polling should be set in place."""
335 already data then no polling should be set in place."""
334 return bool(self._buffer)
336 return bool(self._buffer)
335
337
336 @property
338 @property
337 def closed(self):
339 def closed(self):
338 return self._input.closed
340 return self._input.closed
339
341
340 def fileno(self):
342 def fileno(self):
341 return self._input.fileno()
343 return self._input.fileno()
342
344
343 def close(self):
345 def close(self):
344 return self._input.close()
346 return self._input.close()
345
347
346 def read(self, size):
348 def read(self, size):
347 while (not self._eof) and (self._lenbuf < size):
349 while (not self._eof) and (self._lenbuf < size):
348 self._fillbuffer()
350 self._fillbuffer()
349 return self._frombuffer(size)
351 return self._frombuffer(size)
350
352
351 def readline(self, *args, **kwargs):
353 def readline(self, *args, **kwargs):
352 if 1 < len(self._buffer):
354 if 1 < len(self._buffer):
353 # this should not happen because both read and readline end with a
355 # this should not happen because both read and readline end with a
354 # _frombuffer call that collapse it.
356 # _frombuffer call that collapse it.
355 self._buffer = [''.join(self._buffer)]
357 self._buffer = [''.join(self._buffer)]
356 self._lenbuf = len(self._buffer[0])
358 self._lenbuf = len(self._buffer[0])
357 lfi = -1
359 lfi = -1
358 if self._buffer:
360 if self._buffer:
359 lfi = self._buffer[-1].find('\n')
361 lfi = self._buffer[-1].find('\n')
360 while (not self._eof) and lfi < 0:
362 while (not self._eof) and lfi < 0:
361 self._fillbuffer()
363 self._fillbuffer()
362 if self._buffer:
364 if self._buffer:
363 lfi = self._buffer[-1].find('\n')
365 lfi = self._buffer[-1].find('\n')
364 size = lfi + 1
366 size = lfi + 1
365 if lfi < 0: # end of file
367 if lfi < 0: # end of file
366 size = self._lenbuf
368 size = self._lenbuf
367 elif 1 < len(self._buffer):
369 elif 1 < len(self._buffer):
368 # we need to take previous chunks into account
370 # we need to take previous chunks into account
369 size += self._lenbuf - len(self._buffer[-1])
371 size += self._lenbuf - len(self._buffer[-1])
370 return self._frombuffer(size)
372 return self._frombuffer(size)
371
373
372 def _frombuffer(self, size):
374 def _frombuffer(self, size):
373 """return at most 'size' data from the buffer
375 """return at most 'size' data from the buffer
374
376
375 The data are removed from the buffer."""
377 The data are removed from the buffer."""
376 if size == 0 or not self._buffer:
378 if size == 0 or not self._buffer:
377 return ''
379 return ''
378 buf = self._buffer[0]
380 buf = self._buffer[0]
379 if 1 < len(self._buffer):
381 if 1 < len(self._buffer):
380 buf = ''.join(self._buffer)
382 buf = ''.join(self._buffer)
381
383
382 data = buf[:size]
384 data = buf[:size]
383 buf = buf[len(data):]
385 buf = buf[len(data):]
384 if buf:
386 if buf:
385 self._buffer = [buf]
387 self._buffer = [buf]
386 self._lenbuf = len(buf)
388 self._lenbuf = len(buf)
387 else:
389 else:
388 self._buffer = []
390 self._buffer = []
389 self._lenbuf = 0
391 self._lenbuf = 0
390 return data
392 return data
391
393
392 def _fillbuffer(self):
394 def _fillbuffer(self):
393 """read data to the buffer"""
395 """read data to the buffer"""
394 data = os.read(self._input.fileno(), _chunksize)
396 data = os.read(self._input.fileno(), _chunksize)
395 if not data:
397 if not data:
396 self._eof = True
398 self._eof = True
397 else:
399 else:
398 self._lenbuf += len(data)
400 self._lenbuf += len(data)
399 self._buffer.append(data)
401 self._buffer.append(data)
400
402
401 def popen2(cmd, env=None, newlines=False):
403 def popen2(cmd, env=None, newlines=False):
402 # Setting bufsize to -1 lets the system decide the buffer size.
404 # Setting bufsize to -1 lets the system decide the buffer size.
403 # The default for bufsize is 0, meaning unbuffered. This leads to
405 # The default for bufsize is 0, meaning unbuffered. This leads to
404 # poor performance on Mac OS X: http://bugs.python.org/issue4194
406 # poor performance on Mac OS X: http://bugs.python.org/issue4194
405 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
407 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
406 close_fds=closefds,
408 close_fds=closefds,
407 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
409 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
408 universal_newlines=newlines,
410 universal_newlines=newlines,
409 env=env)
411 env=env)
410 return p.stdin, p.stdout
412 return p.stdin, p.stdout
411
413
412 def popen3(cmd, env=None, newlines=False):
414 def popen3(cmd, env=None, newlines=False):
413 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
415 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
414 return stdin, stdout, stderr
416 return stdin, stdout, stderr
415
417
416 def popen4(cmd, env=None, newlines=False, bufsize=-1):
418 def popen4(cmd, env=None, newlines=False, bufsize=-1):
417 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
419 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
418 close_fds=closefds,
420 close_fds=closefds,
419 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
421 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
420 stderr=subprocess.PIPE,
422 stderr=subprocess.PIPE,
421 universal_newlines=newlines,
423 universal_newlines=newlines,
422 env=env)
424 env=env)
423 return p.stdin, p.stdout, p.stderr, p
425 return p.stdin, p.stdout, p.stderr, p
424
426
425 def version():
427 def version():
426 """Return version information if available."""
428 """Return version information if available."""
427 try:
429 try:
428 from . import __version__
430 from . import __version__
429 return __version__.version
431 return __version__.version
430 except ImportError:
432 except ImportError:
431 return 'unknown'
433 return 'unknown'
432
434
433 def versiontuple(v=None, n=4):
435 def versiontuple(v=None, n=4):
434 """Parses a Mercurial version string into an N-tuple.
436 """Parses a Mercurial version string into an N-tuple.
435
437
436 The version string to be parsed is specified with the ``v`` argument.
438 The version string to be parsed is specified with the ``v`` argument.
437 If it isn't defined, the current Mercurial version string will be parsed.
439 If it isn't defined, the current Mercurial version string will be parsed.
438
440
439 ``n`` can be 2, 3, or 4. Here is how some version strings map to
441 ``n`` can be 2, 3, or 4. Here is how some version strings map to
440 returned values:
442 returned values:
441
443
442 >>> v = '3.6.1+190-df9b73d2d444'
444 >>> v = '3.6.1+190-df9b73d2d444'
443 >>> versiontuple(v, 2)
445 >>> versiontuple(v, 2)
444 (3, 6)
446 (3, 6)
445 >>> versiontuple(v, 3)
447 >>> versiontuple(v, 3)
446 (3, 6, 1)
448 (3, 6, 1)
447 >>> versiontuple(v, 4)
449 >>> versiontuple(v, 4)
448 (3, 6, 1, '190-df9b73d2d444')
450 (3, 6, 1, '190-df9b73d2d444')
449
451
450 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
452 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
451 (3, 6, 1, '190-df9b73d2d444+20151118')
453 (3, 6, 1, '190-df9b73d2d444+20151118')
452
454
453 >>> v = '3.6'
455 >>> v = '3.6'
454 >>> versiontuple(v, 2)
456 >>> versiontuple(v, 2)
455 (3, 6)
457 (3, 6)
456 >>> versiontuple(v, 3)
458 >>> versiontuple(v, 3)
457 (3, 6, None)
459 (3, 6, None)
458 >>> versiontuple(v, 4)
460 >>> versiontuple(v, 4)
459 (3, 6, None, None)
461 (3, 6, None, None)
460
462
461 >>> v = '3.9-rc'
463 >>> v = '3.9-rc'
462 >>> versiontuple(v, 2)
464 >>> versiontuple(v, 2)
463 (3, 9)
465 (3, 9)
464 >>> versiontuple(v, 3)
466 >>> versiontuple(v, 3)
465 (3, 9, None)
467 (3, 9, None)
466 >>> versiontuple(v, 4)
468 >>> versiontuple(v, 4)
467 (3, 9, None, 'rc')
469 (3, 9, None, 'rc')
468
470
469 >>> v = '3.9-rc+2-02a8fea4289b'
471 >>> v = '3.9-rc+2-02a8fea4289b'
470 >>> versiontuple(v, 2)
472 >>> versiontuple(v, 2)
471 (3, 9)
473 (3, 9)
472 >>> versiontuple(v, 3)
474 >>> versiontuple(v, 3)
473 (3, 9, None)
475 (3, 9, None)
474 >>> versiontuple(v, 4)
476 >>> versiontuple(v, 4)
475 (3, 9, None, 'rc+2-02a8fea4289b')
477 (3, 9, None, 'rc+2-02a8fea4289b')
476 """
478 """
477 if not v:
479 if not v:
478 v = version()
480 v = version()
479 parts = remod.split('[\+-]', v, 1)
481 parts = remod.split('[\+-]', v, 1)
480 if len(parts) == 1:
482 if len(parts) == 1:
481 vparts, extra = parts[0], None
483 vparts, extra = parts[0], None
482 else:
484 else:
483 vparts, extra = parts
485 vparts, extra = parts
484
486
485 vints = []
487 vints = []
486 for i in vparts.split('.'):
488 for i in vparts.split('.'):
487 try:
489 try:
488 vints.append(int(i))
490 vints.append(int(i))
489 except ValueError:
491 except ValueError:
490 break
492 break
491 # (3, 6) -> (3, 6, None)
493 # (3, 6) -> (3, 6, None)
492 while len(vints) < 3:
494 while len(vints) < 3:
493 vints.append(None)
495 vints.append(None)
494
496
495 if n == 2:
497 if n == 2:
496 return (vints[0], vints[1])
498 return (vints[0], vints[1])
497 if n == 3:
499 if n == 3:
498 return (vints[0], vints[1], vints[2])
500 return (vints[0], vints[1], vints[2])
499 if n == 4:
501 if n == 4:
500 return (vints[0], vints[1], vints[2], extra)
502 return (vints[0], vints[1], vints[2], extra)
501
503
502 # used by parsedate
504 # used by parsedate
503 defaultdateformats = (
505 defaultdateformats = (
504 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
506 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
505 '%Y-%m-%dT%H:%M', # without seconds
507 '%Y-%m-%dT%H:%M', # without seconds
506 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
508 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
507 '%Y-%m-%dT%H%M', # without seconds
509 '%Y-%m-%dT%H%M', # without seconds
508 '%Y-%m-%d %H:%M:%S', # our common legal variant
510 '%Y-%m-%d %H:%M:%S', # our common legal variant
509 '%Y-%m-%d %H:%M', # without seconds
511 '%Y-%m-%d %H:%M', # without seconds
510 '%Y-%m-%d %H%M%S', # without :
512 '%Y-%m-%d %H%M%S', # without :
511 '%Y-%m-%d %H%M', # without seconds
513 '%Y-%m-%d %H%M', # without seconds
512 '%Y-%m-%d %I:%M:%S%p',
514 '%Y-%m-%d %I:%M:%S%p',
513 '%Y-%m-%d %H:%M',
515 '%Y-%m-%d %H:%M',
514 '%Y-%m-%d %I:%M%p',
516 '%Y-%m-%d %I:%M%p',
515 '%Y-%m-%d',
517 '%Y-%m-%d',
516 '%m-%d',
518 '%m-%d',
517 '%m/%d',
519 '%m/%d',
518 '%m/%d/%y',
520 '%m/%d/%y',
519 '%m/%d/%Y',
521 '%m/%d/%Y',
520 '%a %b %d %H:%M:%S %Y',
522 '%a %b %d %H:%M:%S %Y',
521 '%a %b %d %I:%M:%S%p %Y',
523 '%a %b %d %I:%M:%S%p %Y',
522 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
524 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
523 '%b %d %H:%M:%S %Y',
525 '%b %d %H:%M:%S %Y',
524 '%b %d %I:%M:%S%p %Y',
526 '%b %d %I:%M:%S%p %Y',
525 '%b %d %H:%M:%S',
527 '%b %d %H:%M:%S',
526 '%b %d %I:%M:%S%p',
528 '%b %d %I:%M:%S%p',
527 '%b %d %H:%M',
529 '%b %d %H:%M',
528 '%b %d %I:%M%p',
530 '%b %d %I:%M%p',
529 '%b %d %Y',
531 '%b %d %Y',
530 '%b %d',
532 '%b %d',
531 '%H:%M:%S',
533 '%H:%M:%S',
532 '%I:%M:%S%p',
534 '%I:%M:%S%p',
533 '%H:%M',
535 '%H:%M',
534 '%I:%M%p',
536 '%I:%M%p',
535 )
537 )
536
538
537 extendeddateformats = defaultdateformats + (
539 extendeddateformats = defaultdateformats + (
538 "%Y",
540 "%Y",
539 "%Y-%m",
541 "%Y-%m",
540 "%b",
542 "%b",
541 "%b %Y",
543 "%b %Y",
542 )
544 )
543
545
def cachefunc(func):
    '''memoize the results of calls to ``func``

    Keyword arguments are not supported; the cache key is the positional
    argument tuple (or the single argument itself, as a fast path).
    '''
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # a zero-argument function has exactly one result to remember
        results = []
        def f():
            if not results:
                results.append(func())
            return results[0]
        return f
    results = {}
    if argcount == 1:
        # single-argument fast path: key on the argument itself instead
        # of packing/unpacking an args tuple
        def f(arg):
            if arg not in results:
                results[arg] = func(arg)
            return results[arg]
    else:
        def f(*args):
            if args not in results:
                results[args] = func(*args)
            return results[args]

    return f
569
571
class sortdict(collections.OrderedDict):
    '''an insertion-ordered dictionary where re-setting a key moves it last

    >>> d1 = sortdict([('a', 0), ('b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([('a', 2)])
    >>> d2.keys() # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # drop any existing entry first so the key is re-appended at the end
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)
586
588
class _lrucachenode(object):
    """A single element of lrucachedict's circular doubly linked list.

    Carries the prev/next links plus the cached key/value pair for one
    dictionary entry.
    """
    # __slots__ keeps per-node memory small; the cache may hold many nodes
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        self.prev = None
        self.next = None

        # _notset (module-level sentinel) marks a slot with no entry
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    Backed by a plain dict (keyed by the original key) plus a circular
    doubly linked list that records recency order.

    The head node is always the newest entry. When the cache is full the
    node just before the head (the oldest) is recycled for new entries.
    Every access promotes the touched node to the head position.
    """
    def __init__(self, max):
        self._cache = {}

        # start with a one-node ring pointing at itself
        head = _lrucachenode()
        head.prev = head
        head.next = head
        self._head = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # walk newest to oldest; iteration order is not part of the contract
        node = self._head
        for _unused in range(len(self._cache)):
            yield node.key
            node = node.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        if node is not None:
            # key already cached: update in place and refresh its age
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # ring is full: recycle the oldest node
            node = self._head.prev

            # evict whatever entry that node currently holds
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # the recycled/added node already sits just before the old head,
        # so promoting it to head marks it newest with no relinking
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # rotate the emptied node to the head position, then advance the
        # head past it, which leaves it as the oldest slot for reuse
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        node = self._head
        while node.key is not _notset:
            node.markempty()
            node = node.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        node = self._head.prev
        # replay entries oldest-first so the copy ends up ordered the same
        for _unused in range(len(self._cache)):
            result[node.key] = node.value
            node = node.prev
        return result

    def _movetohead(self, node):
        """Unlink ``node`` and re-insert it just before the head, then make
        it the new head (i.e. the freshest entry).

        With head A and promoted node N:

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        becomes:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A
        """
        head = self._head
        # unlink node from its current position (C <-> D)
        node.prev.next = node.next
        node.next.prev = node.prev
        # splice node in between head.prev and head; going through
        # head.prev.next instead of head directly keeps this correct even
        # when node is adjacent to the head
        node.prev = head.prev
        node.next = head.prev.next
        node.next.prev = node
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Grow the ring by one node, inserted just before the head.

        Returns the new node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
764
766
def lrucachefunc(func):
    '''memoize ``func``, keeping only the ~20 most recently used results'''
    results = {}
    age = collections.deque()  # oldest key at the left end
    if func.__code__.co_argcount == 1:
        # single-argument fast path: key on the argument itself
        def f(arg):
            if arg not in results:
                if len(results) > 20:
                    del results[age.popleft()]
                results[arg] = func(arg)
            else:
                age.remove(arg)
            age.append(arg)
            return results[arg]
    else:
        def f(*args):
            if args not in results:
                if len(results) > 20:
                    del results[age.popleft()]
                results[args] = func(*args)
            else:
                age.remove(args)
            age.append(args)
            return results[args]

    return f
791
793
class propertycache(object):
    '''descriptor that computes an attribute once and stores the result on
    the instance, so later reads bypass the descriptor entirely'''
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # write straight into __dict__ to dodge any __setattr__ override
        # (eg: repoview)
        obj.__dict__[self.name] = value
804
806
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _err = proc.communicate(s)  # stderr is inherited, not captured
    return out
811
813
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write the input to a temp file for the command to read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        # reserve an output temp file; the command will (re)write it
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            # OpenVMS signals success with the low bit set
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort cleanup of both temporary files
        for name in (inname, outname):
            try:
                if name:
                    os.unlink(name)
            except OSError:
                pass
845
847
# maps a filter-spec prefix to the function implementing that scheme
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
850
852
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on a recognized prefix; default to a plain pipe filter
    for name, fn in filtertable.iteritems():
        if not cmd.startswith(name):
            continue
        return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)
857
859
def binary(s):
    """return true if a string is binary data"""
    # the classic heuristic: any embedded NUL byte means binary
    if not s:
        return False
    return '\0' in s
861
863
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def _floorlog2(x):
        # index of the highest set bit, or 0 when x == 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    pendingsize = 0
    for chunk in source:
        pending.append(chunk)
        pendingsize += len(chunk)
        if pendingsize >= min:
            if min < max:
                # grow the threshold: at least double it, and jump
                # straight to the size class of what we just buffered,
                # capped at max
                min = min << 1
                nmin = 1 << _floorlog2(pendingsize)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(pending)
            pendingsize = 0
            pending = []
    if pending:
        yield ''.join(pending)
892
894
# convenience re-export: the canonical abort exception lives in error
Abort = error.Abort
894
896
def always(fn):
    '''matcher predicate that accepts every input'''
    return True
897
899
def never(fn):
    '''matcher predicate that rejects every input'''
    return False
900
902
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue has been fixed in 2.7.
    """
    if sys.version_info >= (2, 7):
        # nothing to work around on modern interpreters
        return func

    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable collection if it was on when we started
            if wasenabled:
                gc.enable()
    return wrapper
924
926
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives: no relative path exists between them
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    fromparts = splitpath(n1)
    toparts = n2.split('/')
    fromparts.reverse()
    toparts.reverse()
    # strip the common leading components of both paths
    while fromparts and toparts and fromparts[-1] == toparts[-1]:
        fromparts.pop()
        toparts.pop()
    toparts.reverse()
    # climb out of what remains of n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(fromparts)) + toparts) or '.'
950
952
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (safehasattr(sys, "frozen")        # new py2exe
            or safehasattr(sys, "importers")  # old py2exe
            or imp.is_frozen(u"__main__"))    # tools/freeze
960
962
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # frozen (py2exe) executables have no usable __file__; data files
    # live next to the executable instead
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)
969
971
# cached path of the 'hg' executable; populated lazily by hgexecutable()
_hgexecutable = None
971
973
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        mainmodfile = pycompat.fsencode(getattr(mainmod, '__file__', ''))
        if hg:
            # explicit override wins
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif os.path.basename(mainmodfile) == 'hg':
            # running from an 'hg' script: that script is the executable
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
995
997
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # stored in the module-level cache read back by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
1000
1002
def _isstdout(f):
    """Return True if file object ``f`` is the process's real stdout.

    Fix: the previous version returned ``fileno`` itself (i.e. ``None``)
    when the object had no fileno() method; now always return a proper
    bool. Truthiness is unchanged for all callers.
    """
    fileno = getattr(f, 'fileno', None)
    return fileno is not None and fileno() == sys.__stdout__.fileno()
1004
1006
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # True -> '1'; None/False -> '0'; anything else via str()
        if val is True:
            return '1'
        if val is None or val is False:
            return '0'
        return str(val)
    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    # always expose the hg executable to child processes
    env['HG'] = hgexecutable()
    return env
1019
1021
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # flush our own stdout first so child output interleaves correctly
    try:
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
                                            and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if cwd is not None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = shellenviron(environ)
        if out is None or _isstdout(out):
            # child inherits our stdout; no capture needed
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture combined stdout/stderr and forward it to ``out``
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in iter(proc.stdout.readline, ''):
                out.write(line)
            proc.wait()
            rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # OpenVMS signals success with the low bit set
        rc = 0
    return rc
1054
1056
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback of depth one means the TypeError came from the
            # call itself (bad argument list), not from inside func
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
1066
1068
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1081
1083
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember the old stat so mtime ambiguity can be detected later
            oldstat = checkambig and filestat(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the symlink rather than copying its target; copytime is
        # ignored for symlinks, but in general copytime isn't needed for
        # them anyway
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one;
                    # nudge mtime forward so the change is observable
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1133
1135
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    # topic depends on hardlink, which may still change below
    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # default to hardlinking when src and dst share a device
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # shift child progress by the files copied so far
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed (e.g. cross-device); degrade to copying
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1173
1175
1174 _winreservednames = '''con prn aux nul
1176 _winreservednames = '''con prn aux nul
1175 com1 com2 com3 com4 com5 com6 com7 com8 com9
1177 com1 com2 com3 com4 com5 com6 com7 com8 com9
1176 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
1178 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
1177 _winreservedchars = ':*?"<>|'
1179 _winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for part in path.replace('\\', '/').split('/'):
        if not part:
            continue
        for ch in pycompat.bytestr(part):
            if ch in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % ch
            if ord(ch) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % ch
        stem = part.split('.')[0]
        if stem and stem.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % stem
        tail = part[-1]
        # substring trick: 'part not in ".."' is False exactly for the
        # special components '.' and '..', which are allowed
        if tail in '. ' and part not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % tail
1224
1226
# Select the platform-appropriate filename checker and timing function.
if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

# Prefer the high-resolution monotonic clock when the interpreter has it.
timer = getattr(time, "perf_counter", timer)
1234
1236
def makelock(info, pathname):
    """Create a lock file at pathname holding info.

    Preferred form is a symlink whose target is info; on platforms or
    filesystems without symlinks, fall back to an exclusively-created
    regular file containing info.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as err:
        # an existing lock is a real error; other failures fall through
        # to the regular-file strategy
        if err.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    fd = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(fd, info)
    os.close(fd)
1247
1249
def readlock(pathname):
    """Read the info stored in a lock file (see makelock).

    Reads the symlink target when the lock is a symlink, otherwise the
    contents of the regular lock file.
    """
    try:
        return os.readlink(pathname)
    except OSError as err:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported
        if err.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    data = fp.read()
    fp.close()
    return data
1260
1262
def fstat(fp):
    """Stat a file object, even one lacking a fileno() method.

    Falls back to stat'ing fp.name when fp has no file descriptor.
    """
    try:
        fd = fp.fileno()
    except AttributeError:
        return os.stat(fp.name)
    return os.fstat(fd)
1267
1269
1268 # File system features
1270 # File system features
1269
1271
def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st = os.lstat(path)
    dirname, base = os.path.split(path)
    # build a case-swapped sibling name to probe with
    swapped = base.upper()
    if swapped == base:
        swapped = base.lower()
    if swapped == base:
        return True # no evidence against case sensitivity
    probe = os.path.join(dirname, swapped)
    try:
        # same inode under the swapped name => case-insensitive
        return os.lstat(probe) != st
    except OSError:
        return True
1292
1294
1293 try:
1295 try:
1294 import re2
1296 import re2
1295 _re2 = None
1297 _re2 = None
1296 except ImportError:
1298 except ImportError:
1297 _re2 = False
1299 _re2 = False
1298
1300
class _re(object):
    """Facade over the re module that transparently prefers re2."""

    def _checkre2(self):
        # probe whether re2 actually works (see issue3964) and record
        # the verdict in the module-level tri-state flag
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        unsupported = flags & ~(remod.IGNORECASE | remod.MULTILINE)
        if _re2 and not unsupported:
            # re2 takes its flags inline in the pattern
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        return re2.escape if _re2 else remod.escape

re = _re()
1343
1345
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normcased entry name -> filesystem-cased entry name
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # NOTE: the result must be assigned back -- strings are immutable,
    # so the previous bare "seps.replace(...)" call was a no-op and the
    # backslash stayed unescaped inside the character class below.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1386
1388
def getfstype(dirpath):
    '''Get the filesystem type name from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    '''
    impl = getattr(osutil, 'getfstype', None)
    if impl is None:
        # platform module offers no implementation
        return None
    return impl(dirpath)
1393
1395
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    probe = testfile + ".hgtmp1"
    if os.path.lexists(probe):
        return False
    try:
        posixfile(probe, 'w').close()
    except IOError:
        try:
            os.unlink(probe)
        except OSError:
            pass
        return False

    link = testfile + ".hgtmp2"
    fh = None
    try:
        oslink(probe, link)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fh = posixfile(link)
        return nlinks(link) > 1
    except OSError:
        return False
    finally:
        if fh is not None:
            fh.close()
        for fn in (probe, link):
            try:
                os.unlink(fn)
            except OSError:
                pass
1429
1431
def endswithsep(path):
    """Check path ends with os.sep or os.altsep."""
    altsep = pycompat.osaltsep
    return path.endswith(pycompat.ossep) or altsep and path.endswith(altsep)
1434
1436
def splitpath(path):
    """Split path by os.sep.

    This deliberately ignores os.altsep: it is a drop-in for a plain
    "xxx.split(os.sep)".  Run os.path.normpath() on the argument first
    if normalization is needed.
    """
    return path.split(pycompat.ossep)
1442
1444
def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform != 'darwin':
        return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
    # handle SSH access to a box where the user is logged in
    if 'SSH_CONNECTION' in encoding.environ:
        return False
    isgui = getattr(osutil, 'isgui', None)
    if isgui:
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
1457
1459
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    dirname, basename = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % basename, dir=dirname)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want.  If the original file already exists, just copy
    # its mode.  Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            reader = posixfile(name, "rb")
        except IOError as inst:
            # no original to copy from: the fresh empty temp file is fine
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        writer = posixfile(temp, "wb")
        for chunk in filechunkiter(reader):
            writer.write(chunk)
        reader.close()
        writer.close()
    except: # re-raises
        # don't leave the half-written temp file behind
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
1496
1498
class filestat(object):
    """help to exactly detect change of a file

    The 'stat' attribute is the result of 'os.stat()' when the given
    path exists, and None otherwise; this saves callers a separate
    'exists()' examination.
    """
    def __init__(self, path):
        try:
            self.stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self.stat = None

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # when timestamp ambiguity has been avoided, comparing
            # size + ctime + mtime exactly detects a change of the file
            # regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            # at least one side has stat None (file missing)
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)

        Case (*2) means a file changed twice or more within the same
        ctime second, making timestamp comparison ambiguous.  The cure
        is "advance mtime 1 sec if the timestamp is ambiguous" -- but
        advancing only in case (*2) can collide with naturally advanced
        mtimes from case (*1), so every "ctime equal" case is treated
        as ambiguous regardless of mtime.  Advancing mtime
        "if isambig(oldstat)" then guarantees distinct mtimes even when
        the size did not change.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        Does nothing when the process lacks the privileges required to
        touch 'path'.
        """
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            # utime() on a file created by another user causes EPERM
            # when the process lacks appropriate privileges
            if inst.errno != errno.EPERM:
                raise

    def __ne__(self, other):
        return not self == other
1580
1582
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes go to a temporary copy of the target file; calling
    close() renames that copy over the original name, publishing every
    change at once.  Destroying the object without closing it discards
    the writes.

    The checkambig constructor argument cooperates with filestat, and
    is only useful when the target file is guarded by a lock
    (e.g. repo.lock or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # expose the underlying file's methods directly
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        if self._fp.closed:
            return
        self._fp.close()
        target = localpath(self.__name)
        oldstat = self._checkambig and filestat(target)
        if oldstat and oldstat.stat:
            rename(self._tempname, target)
            newstat = filestat(target)
            if newstat.isambig(oldstat):
                # stat of changed file is ambiguous to original one
                advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                os.utime(target, (advanced, advanced))
        else:
            rename(self._tempname, target)

    def discard(self):
        if self._fp.closed:
            return
        try:
            os.unlink(self._tempname)
        except OSError:
            pass
        self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # publish on clean exit, throw away on exception
        if exctype is not None:
            self.discard()
        else:
            self.close()
1643
1645
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
1655
1657
def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as e:
        # a missing file is fine; anything else is a real failure
        if e.errno == errno.ENOENT:
            return
        raise
1663
1665
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            # directory already exists: nothing to do
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # ENOENT: a parent directory is missing -- create the chain of
        # parents recursively, then retry creating ``name`` itself
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success; give up
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)
1691
1693
def readfile(path):
    """Return the entire contents of ``path`` as a bytes string."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
1695
1697
def writefile(path, text):
    """Replace the contents of ``path`` with ``text`` (bytes)."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
1699
1701
def appendfile(path, text):
    """Append ``text`` (bytes) to ``path``, creating it if missing."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
1703
1705
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            # Re-slice any chunk over 1MB into 256KB pieces so a single
            # huge chunk cannot dominate memory while buffered.
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # read offset into the chunk at the head of the queue
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        remaining = l
        pieces = []
        queue = self._queue
        while remaining > 0:
            # refill the queue: buffer roughly 256KB ahead of the reader
            if not queue:
                budget = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    budget -= len(chunk)
                    if budget <= 0:
                        break
                if not queue:
                    # source exhausted; return what we have (short read)
                    break

            # Peek at the head chunk instead of popleft()/appendleft():
            # for partial reads that would cost two deque mutations plus a
            # new str for the remainder.  Tracking _chunkoffset avoids it.
            chunk = queue[0]
            chunklen = len(chunk)
            offset = self._chunkoffset

            if offset == 0 and remaining >= chunklen:
                # Consume the entire head chunk as-is.
                remaining -= chunklen
                queue.popleft()
                pieces.append(chunk)
                # self._chunkoffset stays at 0.
                continue

            unread = chunklen - offset

            if remaining >= unread:
                # Consume the rest of a partially-read head chunk.
                remaining -= unread
                queue.popleft()
                # offset != 0 here (handled above), so the slice below
                # really trims and is not a ``chunk[0:]`` copy.
                pieces.append(chunk[offset:])
                self._chunkoffset = 0
            else:
                # Take only part of the head chunk.
                pieces.append(chunk[offset:offset + remaining])
                self._chunkoffset += remaining
                # goes negative, terminating the loop
                remaining -= unread

        return ''.join(pieces)
1783
1785
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        # never read past the remaining limit
        nbytes = size if limit is None else min(limit, size)
        # nbytes == 0 short-circuits to 0 (falsy) without touching f
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1804
1806
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        raise Abort(_("negative timestamp: %d") % timestamp,
                    hint=_("check your clock"))
    # The local offset is the difference between the same instant
    # rendered as naive UTC and as naive local time.
    utcdt = datetime.datetime.utcfromtimestamp(timestamp)
    localdt = datetime.datetime.fromtimestamp(timestamp)
    delta = utcdt - localdt
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz
1817
1819
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # Render the tz offset: %1 is the signed hours, %2 the minutes,
        # %z shorthand for both.  Positive tz means west of UTC, hence
        # the "-" sign for positive offsets.
        sign = "-" if tz > 0 else "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    d = t - tz
    # clamp to the signed 32-bit range strftime can represent portably
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
    return s
1853
1855
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    # NOTE(review): "8631" above presumably means ISO 8601; the output is
    # the ISO 8601 calendar-date form YYYY-MM-DD.  Defaults to the
    # current date when ``date`` is None (datestr falls back to makedate).
    return datestr(date, format='%Y-%m-%d')
1857
1859
def parsetimezone(s):
    """find a trailing timezone, if any, in string, and return a
    (offset, remainder) pair"""
    # Named UTC aliases.
    if s.endswith(("GMT", "UTC")):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = 1 if s[-5] == "+" else -1
        hours = int(s[-4:-2])
        minutes = int(s[-2:])
        # offset is seconds *west* of UTC, hence the negation
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = 1 if s[-6] == "+" else -1
        hours = int(s[-5:-3])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    # no recognizable timezone suffix
    return None, s
1885
1887
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        part = pycompat.bytestr(part)
        found = [True for p in part if ("%"+p) in format]
        if not found:
            # defaults[part] is a (biased, now) pair; usenow (False->0,
            # True->1) selects which of the two fills the gap
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(encoding.strfromlocal(date),
                              encoding.strfromlocal(format))
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone: derive the offset by comparing the local
        # interpretation (mktime) with the UTC one (timegm)
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1918
1920
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed: pass straight through
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # symbolic dates (also accepted in the user's locale)
    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fast path: raw "unixtime offset" pair
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0:1] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0:1])

            defaults[part] = (b, n)

        # try each configured format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1995
1997
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest instant a (possibly partial) date spec can denote:
        # missing month/day default to 1
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest instant the spec can denote: try month lengths 31..29
        # (a too-long day makes parsedate raise Abort), fall back to 28
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on-or-before: compare against the upper bound of the spec
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on-or-after: compare against the lower bound of the spec
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # relative spec "-N": within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # explicit range "A to B", inclusive on both ends
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # bare date: match anywhere within its span of accuracy
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
2071
2073
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            flags = 0 if casesensitive else remod.I
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        # strip the prefix; whatever remains is matched literally,
        # even if it itself starts with 're:'
        pattern = pattern[8:]

    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2131 return 'literal', pattern, match
2130
2132
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Successively strip, in this order: the mail domain (after '@'),
    # anything up to and including '<', anything after the first space,
    # and anything after the first '.'.
    for sep in ('@', '<', ' ', '.'):
        idx = user.find(sep)
        if idx < 0:
            continue
        if sep == '<':
            user = user[idx + 1:]
        else:
            user = user[:idx]
    return user
2146
2148
def emailuser(user):
    """Return the user portion of an email address."""
    # drop the domain, then anything up to and including '<'
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    return user
2156
2158
def email(author):
    '''get email of author.'''
    # slice between '<' and '>'; a missing '<' gives find() == -1, so
    # the slice starts at 0, and a missing '>' leaves the end open
    close = author.find('>')
    if close == -1:
        close = None
    return author[author.find('<') + 1:close]
2163
2165
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # delegates column-aware (encoding-sensitive) truncation to
    # encoding.trim, appending '...' when the text is shortened
    return encoding.trim(text, maxlength, ellipsis='...')
2167
2169
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity

    unittable rows are (multiplier, divisor, format) triples ordered from
    coarsest to finest unit; the first row whose threshold
    (multiplier * divisor) does not exceed abs(count) is used.
    '''
    def render(count):
        for multiplier, divisor, fmt in unittable:
            if abs(count) >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # below every threshold: fall back to the finest-grained format
        return unittable[-1][2] % count

    return render
2178
2180
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    if toline < fromline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # convert the 1-based inclusive range to a 0-based half-open one
    return fromline - 1, toline
2199
2201
# human-readable rendering of a byte quantity; rows run from coarsest to
# finest so the first row whose threshold fits abs(count) wins
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2212
2214
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    """Normalize line endings in s to LF (collapsing repeated CRs)."""
    return _eolre.sub('\n', s)

def tocrlf(s):
    """Normalize line endings in s to CRLF."""
    return _eolre.sub('\r\n', s)
2223
2225
if pycompat.oslinesep == '\r\n':
    # the OS convention is CRLF (Windows): translate both directions
    tonativeeol, fromnativeeol = tocrlf, tolf
else:
    # the OS convention is already LF: both directions are no-ops
    tonativeeol = fromnativeeol = pycompat.identity
2230
2232
def escapestr(s):
    """Backslash-escape s, e.g. b'a\\nb' -> b'a\\\\nb'."""
    # call underlying function of s.encode('string_escape') directly for
    # Python 3 compatibility; escape_encode returns (data, length)
    encoded, _consumed = codecs.escape_encode(s)
    return encoded
2235
2237
def unescapestr(s):
    """Undo escapestr(): interpret backslash escapes in s."""
    decoded, _consumed = codecs.escape_decode(s)
    return decoded
2238
2240
def uirepr(s):
    """repr() variant for user-facing output.

    Avoid double backslash in Windows path repr().
    """
    text = repr(s)
    return text.replace('\\\\', '\\')
2242
2244
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Instantiate a width-aware textwrap.TextWrapper subclass.

    The subclass is defined lazily on first call and then cached by
    rebinding the module-level MBTextWrapper name to it.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr so the head occupies at most space_left columns.
            consumed = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                consumed += colwidth(ucstr[i])
                if space_left < consumed:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, remainder = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = remainder
            elif not cur_line:
                # cannot break the word: put it on a line of its own
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chunks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    chunkcols = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + chunkcols <= width:
                        cur_line.append(chunks.pop())
                        cur_len += chunkcols
                    else:
                        # Nope, this line is full.
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # cache the class so later calls skip the class-creation work
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2346
2348
def wrap(line, width, initindent='', hangindent=''):
    """Wrap line to width display columns, returning an encoded string.

    initindent prefixes the first output line, hangindent the rest.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    enc = pycompat.sysstr(encoding.encoding)
    encmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(enc, encmode)
    initindent = initindent.decode(enc, encmode)
    hangindent = hangindent.decode(enc, encmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(enc)
2362
2364
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #                | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            fd = fp.fileno()
            pending = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                pending += buf
                if '\n' in buf:
                    # emit every complete line; keep a trailing partial one
                    pieces = pending.splitlines(True)
                    pending = ''
                    for piece in pieces:
                        if piece[-1] == '\n':
                            yield piece
                        else:
                            pending = piece
                if not buf:
                    break
            if pending:
                yield pending

    def iterfile(fp):
        # regular on-disk files take the fast path: the raw file object
        # (with its fast readahead __iter__) is returned unchanged
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2434
2436
def iterlines(iterator):
    """Yield the individual lines of every chunk produced by iterator."""
    for block in iterator:
        blocklines = block.splitlines()
        for entry in blocklines:
            yield entry
2439
2441
def expandpath(path):
    """Expand environment variables and a leading ~user in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2442
2444
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2457
2459
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # re-check condfn() after detecting death to avoid a race
            # between the child succeeding and then exiting
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2492
2494
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda text: text)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # let the prefix escape itself: "<prefix><prefix>" -> "<prefix>"
        patterns += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(r'%s(%s)' % (prefix, patterns))
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
2517
2519
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall back to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2534
2536
# canonical spellings accepted as booleans (hgrc values, command lines)
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower())
2545
2547
2546 _hextochr = dict((a + b, chr(int(a + b, 16)))
2548 _hextochr = dict((a + b, chr(int(a + b, 16)))
2547 for a in string.hexdigits for b in string.hexdigits)
2549 for a in string.hexdigits for b in string.hexdigits)
2548
2550
2549 class url(object):
2551 class url(object):
2550 r"""Reliable URL parser.
2552 r"""Reliable URL parser.
2551
2553
2552 This parses URLs and provides attributes for the following
2554 This parses URLs and provides attributes for the following
2553 components:
2555 components:
2554
2556
2555 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2557 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2556
2558
2557 Missing components are set to None. The only exception is
2559 Missing components are set to None. The only exception is
2558 fragment, which is set to '' if present but empty.
2560 fragment, which is set to '' if present but empty.
2559
2561
2560 If parsefragment is False, fragment is included in query. If
2562 If parsefragment is False, fragment is included in query. If
2561 parsequery is False, query is included in path. If both are
2563 parsequery is False, query is included in path. If both are
2562 False, both fragment and query are included in path.
2564 False, both fragment and query are included in path.
2563
2565
2564 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2566 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2565
2567
2566 Note that for backward compatibility reasons, bundle URLs do not
2568 Note that for backward compatibility reasons, bundle URLs do not
2567 take host names. That means 'bundle://../' has a path of '../'.
2569 take host names. That means 'bundle://../' has a path of '../'.
2568
2570
2569 Examples:
2571 Examples:
2570
2572
2571 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2573 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2572 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2574 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2573 >>> url('ssh://[::1]:2200//home/joe/repo')
2575 >>> url('ssh://[::1]:2200//home/joe/repo')
2574 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2576 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2575 >>> url('file:///home/joe/repo')
2577 >>> url('file:///home/joe/repo')
2576 <url scheme: 'file', path: '/home/joe/repo'>
2578 <url scheme: 'file', path: '/home/joe/repo'>
2577 >>> url('file:///c:/temp/foo/')
2579 >>> url('file:///c:/temp/foo/')
2578 <url scheme: 'file', path: 'c:/temp/foo/'>
2580 <url scheme: 'file', path: 'c:/temp/foo/'>
2579 >>> url('bundle:foo')
2581 >>> url('bundle:foo')
2580 <url scheme: 'bundle', path: 'foo'>
2582 <url scheme: 'bundle', path: 'foo'>
2581 >>> url('bundle://../foo')
2583 >>> url('bundle://../foo')
2582 <url scheme: 'bundle', path: '../foo'>
2584 <url scheme: 'bundle', path: '../foo'>
2583 >>> url(r'c:\foo\bar')
2585 >>> url(r'c:\foo\bar')
2584 <url path: 'c:\\foo\\bar'>
2586 <url path: 'c:\\foo\\bar'>
2585 >>> url(r'\\blah\blah\blah')
2587 >>> url(r'\\blah\blah\blah')
2586 <url path: '\\\\blah\\blah\\blah'>
2588 <url path: '\\\\blah\\blah\\blah'>
2587 >>> url(r'\\blah\blah\blah#baz')
2589 >>> url(r'\\blah\blah\blah#baz')
2588 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2590 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2589 >>> url(r'file:///C:\users\me')
2591 >>> url(r'file:///C:\users\me')
2590 <url scheme: 'file', path: 'C:\\users\\me'>
2592 <url scheme: 'file', path: 'C:\\users\\me'>
2591
2593
2592 Authentication credentials:
2594 Authentication credentials:
2593
2595
2594 >>> url('ssh://joe:xyz@x/repo')
2596 >>> url('ssh://joe:xyz@x/repo')
2595 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2597 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2596 >>> url('ssh://joe@x/repo')
2598 >>> url('ssh://joe@x/repo')
2597 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2599 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2598
2600
2599 Query strings and fragments:
2601 Query strings and fragments:
2600
2602
2601 >>> url('http://host/a?b#c')
2603 >>> url('http://host/a?b#c')
2602 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2604 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2603 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2605 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2604 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2606 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2605
2607
2606 Empty path:
2608 Empty path:
2607
2609
2608 >>> url('')
2610 >>> url('')
2609 <url path: ''>
2611 <url path: ''>
2610 >>> url('#a')
2612 >>> url('#a')
2611 <url path: '', fragment: 'a'>
2613 <url path: '', fragment: 'a'>
2612 >>> url('http://host/')
2614 >>> url('http://host/')
2613 <url scheme: 'http', host: 'host', path: ''>
2615 <url scheme: 'http', host: 'host', path: ''>
2614 >>> url('http://host/#a')
2616 >>> url('http://host/#a')
2615 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2617 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2616
2618
2617 Only scheme:
2619 Only scheme:
2618
2620
2619 >>> url('http:')
2621 >>> url('http:')
2620 <url scheme: 'http'>
2622 <url scheme: 'http'>
2621 """
2623 """
2622
2624
2623 _safechars = "!~*'()+"
2625 _safechars = "!~*'()+"
2624 _safepchars = "/!~*'()+:\\"
2626 _safepchars = "/!~*'()+:\\"
2625 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2627 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2626
2628
def __init__(self, path, parsequery=True, parsefragment=True):
    """Parse *path* into scheme/user/passwd/host/port/path/query/fragment.

    With parsequery/parsefragment disabled, the corresponding markers
    ('?' / '#') are left inside the path component.
    """
    # We slowly chomp away at path until we have only the path left
    self.scheme = self.user = self.passwd = self.host = None
    self.port = self.path = self.query = self.fragment = None
    self._localpath = True
    self._hostport = ''
    self._origpath = path

    if parsefragment and '#' in path:
        path, self.fragment = path.split('#', 1)

    # special case for Windows drive letters and UNC paths
    if hasdriveletter(path) or path.startswith('\\\\'):
        self.path = path
        return

    # For compatibility reasons, we can't handle bundle paths as
    # normal URLS
    if path.startswith('bundle:'):
        self.scheme = 'bundle'
        path = path[7:]
        if path.startswith('//'):
            path = path[2:]
        self.path = path
        return

    if self._matchscheme(path):
        pieces = path.split(':', 1)
        if pieces[0]:
            self.scheme, path = pieces
            self._localpath = False

    if not path:
        path = None
        if self._localpath:
            self.path = ''
            return
    else:
        if self._localpath:
            self.path = path
            return

    if parsequery and '?' in path:
        path, self.query = path.split('?', 1)
        if not path:
            path = None
        if not self.query:
            self.query = None

    # // is required to specify a host/authority
    if path and path.startswith('//'):
        pieces = path[2:].split('/', 1)
        if len(pieces) > 1:
            self.host, path = pieces
        else:
            self.host = pieces[0]
            path = None
        if not self.host:
            self.host = None
            # path of file:///d is /d
            # path of file:///d:/ is d:/, not /d:/
            if path and not hasdriveletter(path):
                path = '/' + path

    if self.host and '@' in self.host:
        self.user, self.host = self.host.rsplit('@', 1)
        if ':' in self.user:
            self.user, self.passwd = self.user.split(':', 1)
        if not self.host:
            self.host = None

    # Don't split on colons in IPv6 addresses without ports
    if (self.host and ':' in self.host and
        not (self.host.startswith('[') and self.host.endswith(']'))):
        self._hostport = self.host
        self.host, self.port = self.host.rsplit(':', 1)
        if not self.host:
            self.host = None

    if (self.host and self.scheme == 'file' and
        self.host not in ('localhost', '127.0.0.1', '[::1]')):
        raise Abort(_('file:// URLs can only refer to localhost'))

    self.path = path

    # leave the query string escaped
    for name in ('user', 'passwd', 'host', 'port',
                 'path', 'fragment'):
        value = getattr(self, name)
        if value is not None:
            setattr(self, name, urlreq.unquote(value))
2718
2720
def __repr__(self):
    """Return a debug representation listing only the parsed components."""
    names = ('scheme', 'user', 'passwd', 'host', 'port', 'path',
             'query', 'fragment')
    shown = ['%s: %r' % (name, getattr(self, name))
             for name in names if getattr(self, name) is not None]
    return '<url %s>' % ', '.join(shown)
2727
2729
def __str__(self):
    r"""Join the URL's components back into a URL string.

    Examples:

    >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
    'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
    >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
    'http://user:pw@host:80/?foo=bar&baz=42'
    >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
    'http://user:pw@host:80/?foo=bar%3dbaz'
    >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
    'ssh://user:pw@[::1]:2200//home/joe#'
    >>> str(url('http://localhost:80//'))
    'http://localhost:80//'
    >>> str(url('http://localhost:80/'))
    'http://localhost:80/'
    >>> str(url('http://localhost:80'))
    'http://localhost:80/'
    >>> str(url('bundle:foo'))
    'bundle:foo'
    >>> str(url('bundle://../foo'))
    'bundle:../foo'
    >>> str(url('path'))
    'path'
    >>> str(url('file:///tmp/foo/bar'))
    'file:///tmp/foo/bar'
    >>> str(url('file:///c:/tmp/foo/bar'))
    'file:///c:/tmp/foo/bar'
    >>> print url(r'bundle:foo\bar')
    bundle:foo\bar
    >>> print url(r'file:///D:\data\hg')
    file:///D:\data\hg
    """
    # All the assembly work happens in __bytes__; convert its result
    # to the native str type.
    return encoding.strfromlocal(self.__bytes__())
2763
2765
def __bytes__(self):
    """Reassemble the components into a URL byte string."""
    # Local paths round-trip through self.path, plus the optional
    # 'bundle:' prefix and fragment.
    if self._localpath:
        out = self.path
        if self.scheme == 'bundle':
            out = 'bundle:' + out
        if self.fragment:
            out += '#' + self.fragment
        return out

    out = self.scheme + ':'
    if self.user or self.passwd or self.host:
        out += '//'
    elif self.scheme and (not self.path or self.path.startswith('/')
                          or hasdriveletter(self.path)):
        out += '//'
        if hasdriveletter(self.path):
            # file:///c:/... needs a third slash before the drive letter
            out += '/'
    if self.user:
        out += urlreq.quote(self.user, safe=self._safechars)
    if self.passwd:
        out += ':' + urlreq.quote(self.passwd, safe=self._safechars)
    if self.user or self.passwd:
        out += '@'
    if self.host:
        # bracketed IPv6 literals are emitted verbatim
        if not (self.host.startswith('[') and self.host.endswith(']')):
            out += urlreq.quote(self.host)
        else:
            out += self.host
    if self.port:
        out += ':' + urlreq.quote(self.port)
    if self.host:
        out += '/'
    if self.path:
        # TODO: similar to the query string, we should not unescape the
        # path when we store it, the path might contain '%2f' = '/',
        # which we should *not* escape.
        out += urlreq.quote(self.path, safe=self._safepchars)
    if self.query:
        # we store the query in escaped form.
        out += '?' + self.query
    if self.fragment is not None:
        out += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
    return out
2807
2809
def authinfo(self):
    """Return (url_without_credentials, auth_tuple_or_None).

    The second element matches the urllib2 password-manager shape:
    (realm, uris, user, passwd), or None when no user is set.
    """
    user, passwd = self.user, self.passwd
    try:
        # Temporarily strip credentials so the rendered URL omits them.
        self.user, self.passwd = None, None
        stripped = bytes(self)
    finally:
        self.user, self.passwd = user, passwd
    if not self.user:
        return (stripped, None)
    # authinfo[1] is passed to urllib2 password manager, and its
    # URIs must not contain credentials. The host is passed in the
    # URIs list because Python < 2.4.3 uses only that to search for
    # a password.
    return (stripped, (None, (stripped, self.host),
                       self.user, self.passwd or ''))
2823
2825
def isabs(self):
    """True when this URL cannot be joined onto a base path:
    a remote URL, a drive-letter path, a UNC path, or a rooted path."""
    remote = self.scheme and self.scheme != 'file'
    rooted = self.path.startswith(('\\\\', '/'))  # UNC or POSIX-style
    return bool(remote or hasdriveletter(self.path) or rooted)
2834
2836
def localpath(self):
    """Return a local filesystem path for file:/bundle: URLs;
    otherwise return the original unparsed input."""
    if self.scheme not in ('file', 'bundle'):
        return self._origpath
    path = self.path or '/'
    # For Windows, we need to promote hosts containing drive
    # letters to paths with drive letters.
    if hasdriveletter(self._hostport):
        path = self._hostport + '/' + self.path
    elif (self.host is not None and self.path
          and not hasdriveletter(path)):
        path = '/' + path
    return path
2847
2849
def islocal(self):
    '''whether localpath will return something that posixfile can open'''
    # no scheme at all, or one of the two local schemes
    return not self.scheme or self.scheme in ('file', 'bundle')
2852
2854
def hasscheme(path):
    """Report whether *path* parses with a URL scheme (e.g. 'http://...')."""
    parsed = url(path)
    return bool(parsed.scheme)
2855
2857
def hasdriveletter(path):
    """Report whether *path* begins with a Windows drive letter ('c:...').

    A falsy *path* is returned unchanged; callers evaluate the result in
    a boolean context.
    """
    if not path:
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
2858
2860
def urllocalpath(path):
    """Parse *path* without query/fragment splitting and return its
    local filesystem form."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
2861
2863
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # mask rather than drop, so the URL shape is preserved
        parsed.passwd = '***'
    return bytes(parsed)
2868
2870
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = parsed.passwd = None
    # Render with bytes() for consistency with hidepassword(); the
    # original used str(), which is identical on Python 2 but would
    # bypass __bytes__ under Python 3.
    return bytes(parsed)
2874
2876
# Format a duration (in seconds) with the largest unit that keeps the
# value readable; rows are (threshold, divisor, format) tried in order.
timecount = unitcountfn(
    # seconds
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    # milliseconds
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    # microseconds
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    # nanoseconds
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
)
2890
2892
# Current output indentation of nested @timed calls; a one-element list
# so wrapper closures share mutable state.
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        begin = timer()
        indent = 2
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            elapsed = timer() - begin
            _timenesting[0] -= indent
            # report after un-indenting so nested calls print deeper
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(elapsed)))
    return wrapper
2917
2919
# Suffix -> multiplier table for sizetoint(). Bare 'b' must come last so
# that 'kb'/'mb'/'gb' are matched first.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    text = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if text.endswith(suffix):
                return int(float(text[:-len(suffix)]) * multiplier)
        return int(text)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2939
2941
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, callable) pairs
        self._hooks = []

    def add(self, source, hook):
        """Register *hook* under the name *source*."""
        self._hooks.append((source, hook))

    def __call__(self, *args):
        """Invoke every hook with *args*; return their results in
        source-name order."""
        # stable sort by source name fixes the invocation order
        self._hooks.sort(key=lambda entry: entry[0])
        return [fn(*args) for _src, fn in self._hooks]
2957
2959
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries
    (depth=0 keeps them all).
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    stack = traceback.extract_stack()[:-skip - 1]
    entries = [(fileline % (fname, lineno), funcname)
               for fname, lineno, funcname, _text in stack][-depth:]
    if not entries:
        return
    # width of the widest location column, for aligned output
    fnmax = max(len(location) for location, _func in entries)
    for location, funcname in entries:
        if line is None:
            yield (fnmax, location, funcname)
        else:
            yield line % (fnmax, location, funcname)
2980
2982
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    # flush the other stream first so the trace is not interleaved
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    # skip + 1: hide this helper's own frame as well
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
2995
2997
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> number of entries referencing it
        self._dirs = {}
        add = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: honor the 'skip' state filter
            for fname, entry in map.iteritems():
                if entry[0] != skip:
                    add(fname)
        else:
            for fname in map:
                add(fname)

    def addpath(self, path):
        """Count every ancestor directory of *path* once."""
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # this ancestor (and thus all of its ancestors) is
                # already counted; just bump it and stop
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        """Reverse of addpath: decrement, dropping zero-count entries."""
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3031
3033
# Prefer the C implementation of 'dirs' when the parsers module provides
# one (present in the C build, absent in the pure-Python build).
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3034
3036
def finddirs(path):
    """Yield every ancestor directory of *path*, deepest first."""
    cut = path.rfind('/')
    while cut != -1:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
3040
3042
class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        entered = []
        for factory in self._pending:
            mgr = factory()
            entered.append(mgr.__enter__())
            self._atexit.append(mgr.__exit__)
        del self._pending
        return entered

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for handler in self._atexit:
            try:
                if handler(exc_type, exc_val, exc_tb):
                    # handler suppressed the active exception
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                # a handler itself raised: propagate the newest failure
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
3100
3102
# compression code

# Roles used when negotiating wire protocol compression.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# Describes a compression engine's wire protocol support: its wire
# identifier plus its priority in each role.
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3109
3111
3110 class compressormanager(object):
3112 class compressormanager(object):
3111 """Holds registrations of various compression engines.
3113 """Holds registrations of various compression engines.
3112
3114
3113 This class essentially abstracts the differences between compression
3115 This class essentially abstracts the differences between compression
3114 engines to allow new compression formats to be added easily, possibly from
3116 engines to allow new compression formats to be added easily, possibly from
3115 extensions.
3117 extensions.
3116
3118
3117 Compressors are registered against the global instance by calling its
3119 Compressors are registered against the global instance by calling its
3118 ``register()`` method.
3120 ``register()`` method.
3119 """
3121 """
3120 def __init__(self):
3122 def __init__(self):
3121 self._engines = {}
3123 self._engines = {}
3122 # Bundle spec human name to engine name.
3124 # Bundle spec human name to engine name.
3123 self._bundlenames = {}
3125 self._bundlenames = {}
3124 # Internal bundle identifier to engine name.
3126 # Internal bundle identifier to engine name.
3125 self._bundletypes = {}
3127 self._bundletypes = {}
3126 # Revlog header to engine name.
3128 # Revlog header to engine name.
3127 self._revlogheaders = {}
3129 self._revlogheaders = {}
3128 # Wire proto identifier to engine name.
3130 # Wire proto identifier to engine name.
3129 self._wiretypes = {}
3131 self._wiretypes = {}
3130
3132
3131 def __getitem__(self, key):
3133 def __getitem__(self, key):
3132 return self._engines[key]
3134 return self._engines[key]
3133
3135
3134 def __contains__(self, key):
3136 def __contains__(self, key):
3135 return key in self._engines
3137 return key in self._engines
3136
3138
3137 def __iter__(self):
3139 def __iter__(self):
3138 return iter(self._engines.keys())
3140 return iter(self._engines.keys())
3139
3141
3140 def register(self, engine):
3142 def register(self, engine):
3141 """Register a compression engine with the manager.
3143 """Register a compression engine with the manager.
3142
3144
3143 The argument must be a ``compressionengine`` instance.
3145 The argument must be a ``compressionengine`` instance.
3144 """
3146 """
3145 if not isinstance(engine, compressionengine):
3147 if not isinstance(engine, compressionengine):
3146 raise ValueError(_('argument must be a compressionengine'))
3148 raise ValueError(_('argument must be a compressionengine'))
3147
3149
3148 name = engine.name()
3150 name = engine.name()
3149
3151
3150 if name in self._engines:
3152 if name in self._engines:
3151 raise error.Abort(_('compression engine %s already registered') %
3153 raise error.Abort(_('compression engine %s already registered') %
3152 name)
3154 name)
3153
3155
3154 bundleinfo = engine.bundletype()
3156 bundleinfo = engine.bundletype()
3155 if bundleinfo:
3157 if bundleinfo:
3156 bundlename, bundletype = bundleinfo
3158 bundlename, bundletype = bundleinfo
3157
3159
3158 if bundlename in self._bundlenames:
3160 if bundlename in self._bundlenames:
3159 raise error.Abort(_('bundle name %s already registered') %
3161 raise error.Abort(_('bundle name %s already registered') %
3160 bundlename)
3162 bundlename)
3161 if bundletype in self._bundletypes:
3163 if bundletype in self._bundletypes:
3162 raise error.Abort(_('bundle type %s already registered by %s') %
3164 raise error.Abort(_('bundle type %s already registered by %s') %
3163 (bundletype, self._bundletypes[bundletype]))
3165 (bundletype, self._bundletypes[bundletype]))
3164
3166
3165 # No external facing name declared.
3167 # No external facing name declared.
3166 if bundlename:
3168 if bundlename:
3167 self._bundlenames[bundlename] = name
3169 self._bundlenames[bundlename] = name
3168
3170
3169 self._bundletypes[bundletype] = name
3171 self._bundletypes[bundletype] = name
3170
3172
3171 wiresupport = engine.wireprotosupport()
3173 wiresupport = engine.wireprotosupport()
3172 if wiresupport:
3174 if wiresupport:
3173 wiretype = wiresupport.name
3175 wiretype = wiresupport.name
3174 if wiretype in self._wiretypes:
3176 if wiretype in self._wiretypes:
3175 raise error.Abort(_('wire protocol compression %s already '
3177 raise error.Abort(_('wire protocol compression %s already '
3176 'registered by %s') %
3178 'registered by %s') %
3177 (wiretype, self._wiretypes[wiretype]))
3179 (wiretype, self._wiretypes[wiretype]))
3178
3180
3179 self._wiretypes[wiretype] = name
3181 self._wiretypes[wiretype] = name
3180
3182
3181 revlogheader = engine.revlogheader()
3183 revlogheader = engine.revlogheader()
3182 if revlogheader and revlogheader in self._revlogheaders:
3184 if revlogheader and revlogheader in self._revlogheaders:
3183 raise error.Abort(_('revlog header %s already registered by %s') %
3185 raise error.Abort(_('revlog header %s already registered by %s') %
3184 (revlogheader, self._revlogheaders[revlogheader]))
3186 (revlogheader, self._revlogheaders[revlogheader]))
3185
3187
3186 if revlogheader:
3188 if revlogheader:
3187 self._revlogheaders[revlogheader] = name
3189 self._revlogheaders[revlogheader] = name
3188
3190
3189 self._engines[name] = engine
3191 self._engines[name] = engine
3190
3192
3191 @property
3193 @property
3192 def supportedbundlenames(self):
3194 def supportedbundlenames(self):
3193 return set(self._bundlenames.keys())
3195 return set(self._bundlenames.keys())
3194
3196
3195 @property
3197 @property
3196 def supportedbundletypes(self):
3198 def supportedbundletypes(self):
3197 return set(self._bundletypes.keys())
3199 return set(self._bundletypes.keys())
3198
3200
3199 def forbundlename(self, bundlename):
3201 def forbundlename(self, bundlename):
3200 """Obtain a compression engine registered to a bundle name.
3202 """Obtain a compression engine registered to a bundle name.
3201
3203
3202 Will raise KeyError if the bundle type isn't registered.
3204 Will raise KeyError if the bundle type isn't registered.
3203
3205
3204 Will abort if the engine is known but not available.
3206 Will abort if the engine is known but not available.
3205 """
3207 """
3206 engine = self._engines[self._bundlenames[bundlename]]
3208 engine = self._engines[self._bundlenames[bundlename]]
3207 if not engine.available():
3209 if not engine.available():
3208 raise error.Abort(_('compression engine %s could not be loaded') %
3210 raise error.Abort(_('compression engine %s could not be loaded') %
3209 engine.name())
3211 engine.name())
3210 return engine
3212 return engine
3211
3213
3212 def forbundletype(self, bundletype):
3214 def forbundletype(self, bundletype):
3213 """Obtain a compression engine registered to a bundle type.
3215 """Obtain a compression engine registered to a bundle type.
3214
3216
3215 Will raise KeyError if the bundle type isn't registered.
3217 Will raise KeyError if the bundle type isn't registered.
3216
3218
3217 Will abort if the engine is known but not available.
3219 Will abort if the engine is known but not available.
3218 """
3220 """
3219 engine = self._engines[self._bundletypes[bundletype]]
3221 engine = self._engines[self._bundletypes[bundletype]]
3220 if not engine.available():
3222 if not engine.available():
3221 raise error.Abort(_('compression engine %s could not be loaded') %
3223 raise error.Abort(_('compression engine %s could not be loaded') %
3222 engine.name())
3224 engine.name())
3223 return engine
3225 return engine
3224
3226
3225 def supportedwireengines(self, role, onlyavailable=True):
3227 def supportedwireengines(self, role, onlyavailable=True):
3226 """Obtain compression engines that support the wire protocol.
3228 """Obtain compression engines that support the wire protocol.
3227
3229
3228 Returns a list of engines in prioritized order, most desired first.
3230 Returns a list of engines in prioritized order, most desired first.
3229
3231
3230 If ``onlyavailable`` is set, filter out engines that can't be
3232 If ``onlyavailable`` is set, filter out engines that can't be
3231 loaded.
3233 loaded.
3232 """
3234 """
3233 assert role in (SERVERROLE, CLIENTROLE)
3235 assert role in (SERVERROLE, CLIENTROLE)
3234
3236
3235 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3237 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3236
3238
3237 engines = [self._engines[e] for e in self._wiretypes.values()]
3239 engines = [self._engines[e] for e in self._wiretypes.values()]
3238 if onlyavailable:
3240 if onlyavailable:
3239 engines = [e for e in engines if e.available()]
3241 engines = [e for e in engines if e.available()]
3240
3242
3241 def getkey(e):
3243 def getkey(e):
3242 # Sort first by priority, highest first. In case of tie, sort
3244 # Sort first by priority, highest first. In case of tie, sort
3243 # alphabetically. This is arbitrary, but ensures output is
3245 # alphabetically. This is arbitrary, but ensures output is
3244 # stable.
3246 # stable.
3245 w = e.wireprotosupport()
3247 w = e.wireprotosupport()
3246 return -1 * getattr(w, attr), w.name
3248 return -1 * getattr(w, attr), w.name
3247
3249
3248 return list(sorted(engines, key=getkey))
3250 return list(sorted(engines, key=getkey))
3249
3251
3250 def forwiretype(self, wiretype):
3252 def forwiretype(self, wiretype):
3251 engine = self._engines[self._wiretypes[wiretype]]
3253 engine = self._engines[self._wiretypes[wiretype]]
3252 if not engine.available():
3254 if not engine.available():
3253 raise error.Abort(_('compression engine %s could not be loaded') %
3255 raise error.Abort(_('compression engine %s could not be loaded') %
3254 engine.name())
3256 engine.name())
3255 return engine
3257 return engine
3256
3258
3257 def forrevlogheader(self, header):
3259 def forrevlogheader(self, header):
3258 """Obtain a compression engine registered to a revlog header.
3260 """Obtain a compression engine registered to a revlog header.
3259
3261
3260 Will raise KeyError if the revlog header value isn't registered.
3262 Will raise KeyError if the revlog header value isn't registered.
3261 """
3263 """
3262 return self._engines[self._revlogheaders[header]]
3264 return self._engines[self._revlogheaders[header]]
3263
3265
3264 compengines = compressormanager()
3266 compengines = compressormanager()
3265
3267
3266 class compressionengine(object):
3268 class compressionengine(object):
3267 """Base class for compression engines.
3269 """Base class for compression engines.
3268
3270
3269 Compression engines must implement the interface defined by this class.
3271 Compression engines must implement the interface defined by this class.
3270 """
3272 """
3271 def name(self):
3273 def name(self):
3272 """Returns the name of the compression engine.
3274 """Returns the name of the compression engine.
3273
3275
3274 This is the key the engine is registered under.
3276 This is the key the engine is registered under.
3275
3277
3276 This method must be implemented.
3278 This method must be implemented.
3277 """
3279 """
3278 raise NotImplementedError()
3280 raise NotImplementedError()
3279
3281
3280 def available(self):
3282 def available(self):
3281 """Whether the compression engine is available.
3283 """Whether the compression engine is available.
3282
3284
3283 The intent of this method is to allow optional compression engines
3285 The intent of this method is to allow optional compression engines
3284 that may not be available in all installations (such as engines relying
3286 that may not be available in all installations (such as engines relying
3285 on C extensions that may not be present).
3287 on C extensions that may not be present).
3286 """
3288 """
3287 return True
3289 return True
3288
3290
3289 def bundletype(self):
3291 def bundletype(self):
3290 """Describes bundle identifiers for this engine.
3292 """Describes bundle identifiers for this engine.
3291
3293
3292 If this compression engine isn't supported for bundles, returns None.
3294 If this compression engine isn't supported for bundles, returns None.
3293
3295
3294 If this engine can be used for bundles, returns a 2-tuple of strings of
3296 If this engine can be used for bundles, returns a 2-tuple of strings of
3295 the user-facing "bundle spec" compression name and an internal
3297 the user-facing "bundle spec" compression name and an internal
3296 identifier used to denote the compression format within bundles. To
3298 identifier used to denote the compression format within bundles. To
3297 exclude the name from external usage, set the first element to ``None``.
3299 exclude the name from external usage, set the first element to ``None``.
3298
3300
3299 If bundle compression is supported, the class must also implement
3301 If bundle compression is supported, the class must also implement
3300 ``compressstream`` and `decompressorreader``.
3302 ``compressstream`` and `decompressorreader``.
3301
3303
3302 The docstring of this method is used in the help system to tell users
3304 The docstring of this method is used in the help system to tell users
3303 about this engine.
3305 about this engine.
3304 """
3306 """
3305 return None
3307 return None
3306
3308
3307 def wireprotosupport(self):
3309 def wireprotosupport(self):
3308 """Declare support for this compression format on the wire protocol.
3310 """Declare support for this compression format on the wire protocol.
3309
3311
3310 If this compression engine isn't supported for compressing wire
3312 If this compression engine isn't supported for compressing wire
3311 protocol payloads, returns None.
3313 protocol payloads, returns None.
3312
3314
3313 Otherwise, returns ``compenginewireprotosupport`` with the following
3315 Otherwise, returns ``compenginewireprotosupport`` with the following
3314 fields:
3316 fields:
3315
3317
3316 * String format identifier
3318 * String format identifier
3317 * Integer priority for the server
3319 * Integer priority for the server
3318 * Integer priority for the client
3320 * Integer priority for the client
3319
3321
3320 The integer priorities are used to order the advertisement of format
3322 The integer priorities are used to order the advertisement of format
3321 support by server and client. The highest integer is advertised
3323 support by server and client. The highest integer is advertised
3322 first. Integers with non-positive values aren't advertised.
3324 first. Integers with non-positive values aren't advertised.
3323
3325
3324 The priority values are somewhat arbitrary and only used for default
3326 The priority values are somewhat arbitrary and only used for default
3325 ordering. The relative order can be changed via config options.
3327 ordering. The relative order can be changed via config options.
3326
3328
3327 If wire protocol compression is supported, the class must also implement
3329 If wire protocol compression is supported, the class must also implement
3328 ``compressstream`` and ``decompressorreader``.
3330 ``compressstream`` and ``decompressorreader``.
3329 """
3331 """
3330 return None
3332 return None
3331
3333
3332 def revlogheader(self):
3334 def revlogheader(self):
3333 """Header added to revlog chunks that identifies this engine.
3335 """Header added to revlog chunks that identifies this engine.
3334
3336
3335 If this engine can be used to compress revlogs, this method should
3337 If this engine can be used to compress revlogs, this method should
3336 return the bytes used to identify chunks compressed with this engine.
3338 return the bytes used to identify chunks compressed with this engine.
3337 Else, the method should return ``None`` to indicate it does not
3339 Else, the method should return ``None`` to indicate it does not
3338 participate in revlog compression.
3340 participate in revlog compression.
3339 """
3341 """
3340 return None
3342 return None
3341
3343
3342 def compressstream(self, it, opts=None):
3344 def compressstream(self, it, opts=None):
3343 """Compress an iterator of chunks.
3345 """Compress an iterator of chunks.
3344
3346
3345 The method receives an iterator (ideally a generator) of chunks of
3347 The method receives an iterator (ideally a generator) of chunks of
3346 bytes to be compressed. It returns an iterator (ideally a generator)
3348 bytes to be compressed. It returns an iterator (ideally a generator)
3347 of bytes of chunks representing the compressed output.
3349 of bytes of chunks representing the compressed output.
3348
3350
3349 Optionally accepts an argument defining how to perform compression.
3351 Optionally accepts an argument defining how to perform compression.
3350 Each engine treats this argument differently.
3352 Each engine treats this argument differently.
3351 """
3353 """
3352 raise NotImplementedError()
3354 raise NotImplementedError()
3353
3355
3354 def decompressorreader(self, fh):
3356 def decompressorreader(self, fh):
3355 """Perform decompression on a file object.
3357 """Perform decompression on a file object.
3356
3358
3357 Argument is an object with a ``read(size)`` method that returns
3359 Argument is an object with a ``read(size)`` method that returns
3358 compressed data. Return value is an object with a ``read(size)`` that
3360 compressed data. Return value is an object with a ``read(size)`` that
3359 returns uncompressed data.
3361 returns uncompressed data.
3360 """
3362 """
3361 raise NotImplementedError()
3363 raise NotImplementedError()
3362
3364
3363 def revlogcompressor(self, opts=None):
3365 def revlogcompressor(self, opts=None):
3364 """Obtain an object that can be used to compress revlog entries.
3366 """Obtain an object that can be used to compress revlog entries.
3365
3367
3366 The object has a ``compress(data)`` method that compresses binary
3368 The object has a ``compress(data)`` method that compresses binary
3367 data. This method returns compressed binary data or ``None`` if
3369 data. This method returns compressed binary data or ``None`` if
3368 the data could not be compressed (too small, not compressible, etc).
3370 the data could not be compressed (too small, not compressible, etc).
3369 The returned data should have a header uniquely identifying this
3371 The returned data should have a header uniquely identifying this
3370 compression format so decompression can be routed to this engine.
3372 compression format so decompression can be routed to this engine.
3371 This header should be identified by the ``revlogheader()`` return
3373 This header should be identified by the ``revlogheader()`` return
3372 value.
3374 value.
3373
3375
3374 The object has a ``decompress(data)`` method that decompresses
3376 The object has a ``decompress(data)`` method that decompresses
3375 data. The method will only be called if ``data`` begins with
3377 data. The method will only be called if ``data`` begins with
3376 ``revlogheader()``. The method should return the raw, uncompressed
3378 ``revlogheader()``. The method should return the raw, uncompressed
3377 data or raise a ``RevlogError``.
3379 data or raise a ``RevlogError``.
3378
3380
3379 The object is reusable but is not thread safe.
3381 The object is reusable but is not thread safe.
3380 """
3382 """
3381 raise NotImplementedError()
3383 raise NotImplementedError()
3382
3384
3383 class _zlibengine(compressionengine):
3385 class _zlibengine(compressionengine):
3384 def name(self):
3386 def name(self):
3385 return 'zlib'
3387 return 'zlib'
3386
3388
3387 def bundletype(self):
3389 def bundletype(self):
3388 """zlib compression using the DEFLATE algorithm.
3390 """zlib compression using the DEFLATE algorithm.
3389
3391
3390 All Mercurial clients should support this format. The compression
3392 All Mercurial clients should support this format. The compression
3391 algorithm strikes a reasonable balance between compression ratio
3393 algorithm strikes a reasonable balance between compression ratio
3392 and size.
3394 and size.
3393 """
3395 """
3394 return 'gzip', 'GZ'
3396 return 'gzip', 'GZ'
3395
3397
3396 def wireprotosupport(self):
3398 def wireprotosupport(self):
3397 return compewireprotosupport('zlib', 20, 20)
3399 return compewireprotosupport('zlib', 20, 20)
3398
3400
3399 def revlogheader(self):
3401 def revlogheader(self):
3400 return 'x'
3402 return 'x'
3401
3403
3402 def compressstream(self, it, opts=None):
3404 def compressstream(self, it, opts=None):
3403 opts = opts or {}
3405 opts = opts or {}
3404
3406
3405 z = zlib.compressobj(opts.get('level', -1))
3407 z = zlib.compressobj(opts.get('level', -1))
3406 for chunk in it:
3408 for chunk in it:
3407 data = z.compress(chunk)
3409 data = z.compress(chunk)
3408 # Not all calls to compress emit data. It is cheaper to inspect
3410 # Not all calls to compress emit data. It is cheaper to inspect
3409 # here than to feed empty chunks through generator.
3411 # here than to feed empty chunks through generator.
3410 if data:
3412 if data:
3411 yield data
3413 yield data
3412
3414
3413 yield z.flush()
3415 yield z.flush()
3414
3416
3415 def decompressorreader(self, fh):
3417 def decompressorreader(self, fh):
3416 def gen():
3418 def gen():
3417 d = zlib.decompressobj()
3419 d = zlib.decompressobj()
3418 for chunk in filechunkiter(fh):
3420 for chunk in filechunkiter(fh):
3419 while chunk:
3421 while chunk:
3420 # Limit output size to limit memory.
3422 # Limit output size to limit memory.
3421 yield d.decompress(chunk, 2 ** 18)
3423 yield d.decompress(chunk, 2 ** 18)
3422 chunk = d.unconsumed_tail
3424 chunk = d.unconsumed_tail
3423
3425
3424 return chunkbuffer(gen())
3426 return chunkbuffer(gen())
3425
3427
3426 class zlibrevlogcompressor(object):
3428 class zlibrevlogcompressor(object):
3427 def compress(self, data):
3429 def compress(self, data):
3428 insize = len(data)
3430 insize = len(data)
3429 # Caller handles empty input case.
3431 # Caller handles empty input case.
3430 assert insize > 0
3432 assert insize > 0
3431
3433
3432 if insize < 44:
3434 if insize < 44:
3433 return None
3435 return None
3434
3436
3435 elif insize <= 1000000:
3437 elif insize <= 1000000:
3436 compressed = zlib.compress(data)
3438 compressed = zlib.compress(data)
3437 if len(compressed) < insize:
3439 if len(compressed) < insize:
3438 return compressed
3440 return compressed
3439 return None
3441 return None
3440
3442
3441 # zlib makes an internal copy of the input buffer, doubling
3443 # zlib makes an internal copy of the input buffer, doubling
3442 # memory usage for large inputs. So do streaming compression
3444 # memory usage for large inputs. So do streaming compression
3443 # on large inputs.
3445 # on large inputs.
3444 else:
3446 else:
3445 z = zlib.compressobj()
3447 z = zlib.compressobj()
3446 parts = []
3448 parts = []
3447 pos = 0
3449 pos = 0
3448 while pos < insize:
3450 while pos < insize:
3449 pos2 = pos + 2**20
3451 pos2 = pos + 2**20
3450 parts.append(z.compress(data[pos:pos2]))
3452 parts.append(z.compress(data[pos:pos2]))
3451 pos = pos2
3453 pos = pos2
3452 parts.append(z.flush())
3454 parts.append(z.flush())
3453
3455
3454 if sum(map(len, parts)) < insize:
3456 if sum(map(len, parts)) < insize:
3455 return ''.join(parts)
3457 return ''.join(parts)
3456 return None
3458 return None
3457
3459
3458 def decompress(self, data):
3460 def decompress(self, data):
3459 try:
3461 try:
3460 return zlib.decompress(data)
3462 return zlib.decompress(data)
3461 except zlib.error as e:
3463 except zlib.error as e:
3462 raise error.RevlogError(_('revlog decompress error: %s') %
3464 raise error.RevlogError(_('revlog decompress error: %s') %
3463 str(e))
3465 str(e))
3464
3466
3465 def revlogcompressor(self, opts=None):
3467 def revlogcompressor(self, opts=None):
3466 return self.zlibrevlogcompressor()
3468 return self.zlibrevlogcompressor()
3467
3469
3468 compengines.register(_zlibengine())
3470 compengines.register(_zlibengine())
3469
3471
3470 class _bz2engine(compressionengine):
3472 class _bz2engine(compressionengine):
3471 def name(self):
3473 def name(self):
3472 return 'bz2'
3474 return 'bz2'
3473
3475
3474 def bundletype(self):
3476 def bundletype(self):
3475 """An algorithm that produces smaller bundles than ``gzip``.
3477 """An algorithm that produces smaller bundles than ``gzip``.
3476
3478
3477 All Mercurial clients should support this format.
3479 All Mercurial clients should support this format.
3478
3480
3479 This engine will likely produce smaller bundles than ``gzip`` but
3481 This engine will likely produce smaller bundles than ``gzip`` but
3480 will be significantly slower, both during compression and
3482 will be significantly slower, both during compression and
3481 decompression.
3483 decompression.
3482
3484
3483 If available, the ``zstd`` engine can yield similar or better
3485 If available, the ``zstd`` engine can yield similar or better
3484 compression at much higher speeds.
3486 compression at much higher speeds.
3485 """
3487 """
3486 return 'bzip2', 'BZ'
3488 return 'bzip2', 'BZ'
3487
3489
3488 # We declare a protocol name but don't advertise by default because
3490 # We declare a protocol name but don't advertise by default because
3489 # it is slow.
3491 # it is slow.
3490 def wireprotosupport(self):
3492 def wireprotosupport(self):
3491 return compewireprotosupport('bzip2', 0, 0)
3493 return compewireprotosupport('bzip2', 0, 0)
3492
3494
3493 def compressstream(self, it, opts=None):
3495 def compressstream(self, it, opts=None):
3494 opts = opts or {}
3496 opts = opts or {}
3495 z = bz2.BZ2Compressor(opts.get('level', 9))
3497 z = bz2.BZ2Compressor(opts.get('level', 9))
3496 for chunk in it:
3498 for chunk in it:
3497 data = z.compress(chunk)
3499 data = z.compress(chunk)
3498 if data:
3500 if data:
3499 yield data
3501 yield data
3500
3502
3501 yield z.flush()
3503 yield z.flush()
3502
3504
3503 def decompressorreader(self, fh):
3505 def decompressorreader(self, fh):
3504 def gen():
3506 def gen():
3505 d = bz2.BZ2Decompressor()
3507 d = bz2.BZ2Decompressor()
3506 for chunk in filechunkiter(fh):
3508 for chunk in filechunkiter(fh):
3507 yield d.decompress(chunk)
3509 yield d.decompress(chunk)
3508
3510
3509 return chunkbuffer(gen())
3511 return chunkbuffer(gen())
3510
3512
3511 compengines.register(_bz2engine())
3513 compengines.register(_bz2engine())
3512
3514
3513 class _truncatedbz2engine(compressionengine):
3515 class _truncatedbz2engine(compressionengine):
3514 def name(self):
3516 def name(self):
3515 return 'bz2truncated'
3517 return 'bz2truncated'
3516
3518
3517 def bundletype(self):
3519 def bundletype(self):
3518 return None, '_truncatedBZ'
3520 return None, '_truncatedBZ'
3519
3521
3520 # We don't implement compressstream because it is hackily handled elsewhere.
3522 # We don't implement compressstream because it is hackily handled elsewhere.
3521
3523
3522 def decompressorreader(self, fh):
3524 def decompressorreader(self, fh):
3523 def gen():
3525 def gen():
3524 # The input stream doesn't have the 'BZ' header. So add it back.
3526 # The input stream doesn't have the 'BZ' header. So add it back.
3525 d = bz2.BZ2Decompressor()
3527 d = bz2.BZ2Decompressor()
3526 d.decompress('BZ')
3528 d.decompress('BZ')
3527 for chunk in filechunkiter(fh):
3529 for chunk in filechunkiter(fh):
3528 yield d.decompress(chunk)
3530 yield d.decompress(chunk)
3529
3531
3530 return chunkbuffer(gen())
3532 return chunkbuffer(gen())
3531
3533
3532 compengines.register(_truncatedbz2engine())
3534 compengines.register(_truncatedbz2engine())
3533
3535
3534 class _noopengine(compressionengine):
3536 class _noopengine(compressionengine):
3535 def name(self):
3537 def name(self):
3536 return 'none'
3538 return 'none'
3537
3539
3538 def bundletype(self):
3540 def bundletype(self):
3539 """No compression is performed.
3541 """No compression is performed.
3540
3542
3541 Use this compression engine to explicitly disable compression.
3543 Use this compression engine to explicitly disable compression.
3542 """
3544 """
3543 return 'none', 'UN'
3545 return 'none', 'UN'
3544
3546
3545 # Clients always support uncompressed payloads. Servers don't because
3547 # Clients always support uncompressed payloads. Servers don't because
3546 # unless you are on a fast network, uncompressed payloads can easily
3548 # unless you are on a fast network, uncompressed payloads can easily
3547 # saturate your network pipe.
3549 # saturate your network pipe.
3548 def wireprotosupport(self):
3550 def wireprotosupport(self):
3549 return compewireprotosupport('none', 0, 10)
3551 return compewireprotosupport('none', 0, 10)
3550
3552
3551 # We don't implement revlogheader because it is handled specially
3553 # We don't implement revlogheader because it is handled specially
3552 # in the revlog class.
3554 # in the revlog class.
3553
3555
3554 def compressstream(self, it, opts=None):
3556 def compressstream(self, it, opts=None):
3555 return it
3557 return it
3556
3558
3557 def decompressorreader(self, fh):
3559 def decompressorreader(self, fh):
3558 return fh
3560 return fh
3559
3561
3560 class nooprevlogcompressor(object):
3562 class nooprevlogcompressor(object):
3561 def compress(self, data):
3563 def compress(self, data):
3562 return None
3564 return None
3563
3565
3564 def revlogcompressor(self, opts=None):
3566 def revlogcompressor(self, opts=None):
3565 return self.nooprevlogcompressor()
3567 return self.nooprevlogcompressor()
3566
3568
3567 compengines.register(_noopengine())
3569 compengines.register(_noopengine())
3568
3570
3569 class _zstdengine(compressionengine):
3571 class _zstdengine(compressionengine):
3570 def name(self):
3572 def name(self):
3571 return 'zstd'
3573 return 'zstd'
3572
3574
3573 @propertycache
3575 @propertycache
3574 def _module(self):
3576 def _module(self):
3575 # Not all installs have the zstd module available. So defer importing
3577 # Not all installs have the zstd module available. So defer importing
3576 # until first access.
3578 # until first access.
3577 try:
3579 try:
3578 from . import zstd
3580 from . import zstd
3579 # Force delayed import.
3581 # Force delayed import.
3580 zstd.__version__
3582 zstd.__version__
3581 return zstd
3583 return zstd
3582 except ImportError:
3584 except ImportError:
3583 return None
3585 return None
3584
3586
3585 def available(self):
3587 def available(self):
3586 return bool(self._module)
3588 return bool(self._module)
3587
3589
3588 def bundletype(self):
3590 def bundletype(self):
3589 """A modern compression algorithm that is fast and highly flexible.
3591 """A modern compression algorithm that is fast and highly flexible.
3590
3592
3591 Only supported by Mercurial 4.1 and newer clients.
3593 Only supported by Mercurial 4.1 and newer clients.
3592
3594
3593 With the default settings, zstd compression is both faster and yields
3595 With the default settings, zstd compression is both faster and yields
3594 better compression than ``gzip``. It also frequently yields better
3596 better compression than ``gzip``. It also frequently yields better
3595 compression than ``bzip2`` while operating at much higher speeds.
3597 compression than ``bzip2`` while operating at much higher speeds.
3596
3598
3597 If this engine is available and backwards compatibility is not a
3599 If this engine is available and backwards compatibility is not a
3598 concern, it is likely the best available engine.
3600 concern, it is likely the best available engine.
3599 """
3601 """
3600 return 'zstd', 'ZS'
3602 return 'zstd', 'ZS'
3601
3603
3602 def wireprotosupport(self):
3604 def wireprotosupport(self):
3603 return compewireprotosupport('zstd', 50, 50)
3605 return compewireprotosupport('zstd', 50, 50)
3604
3606
3605 def revlogheader(self):
3607 def revlogheader(self):
3606 return '\x28'
3608 return '\x28'
3607
3609
3608 def compressstream(self, it, opts=None):
3610 def compressstream(self, it, opts=None):
3609 opts = opts or {}
3611 opts = opts or {}
3610 # zstd level 3 is almost always significantly faster than zlib
3612 # zstd level 3 is almost always significantly faster than zlib
3611 # while providing no worse compression. It strikes a good balance
3613 # while providing no worse compression. It strikes a good balance
3612 # between speed and compression.
3614 # between speed and compression.
3613 level = opts.get('level', 3)
3615 level = opts.get('level', 3)
3614
3616
3615 zstd = self._module
3617 zstd = self._module
3616 z = zstd.ZstdCompressor(level=level).compressobj()
3618 z = zstd.ZstdCompressor(level=level).compressobj()
3617 for chunk in it:
3619 for chunk in it:
3618 data = z.compress(chunk)
3620 data = z.compress(chunk)
3619 if data:
3621 if data:
3620 yield data
3622 yield data
3621
3623
3622 yield z.flush()
3624 yield z.flush()
3623
3625
3624 def decompressorreader(self, fh):
3626 def decompressorreader(self, fh):
3625 zstd = self._module
3627 zstd = self._module
3626 dctx = zstd.ZstdDecompressor()
3628 dctx = zstd.ZstdDecompressor()
3627 return chunkbuffer(dctx.read_from(fh))
3629 return chunkbuffer(dctx.read_from(fh))
3628
3630
3629 class zstdrevlogcompressor(object):
3631 class zstdrevlogcompressor(object):
3630 def __init__(self, zstd, level=3):
3632 def __init__(self, zstd, level=3):
3631 # Writing the content size adds a few bytes to the output. However,
3633 # Writing the content size adds a few bytes to the output. However,
3632 # it allows decompression to be more optimal since we can
3634 # it allows decompression to be more optimal since we can
3633 # pre-allocate a buffer to hold the result.
3635 # pre-allocate a buffer to hold the result.
3634 self._cctx = zstd.ZstdCompressor(level=level,
3636 self._cctx = zstd.ZstdCompressor(level=level,
3635 write_content_size=True)
3637 write_content_size=True)
3636 self._dctx = zstd.ZstdDecompressor()
3638 self._dctx = zstd.ZstdDecompressor()
3637 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3639 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3638 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3640 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3639
3641
3640 def compress(self, data):
3642 def compress(self, data):
3641 insize = len(data)
3643 insize = len(data)
3642 # Caller handles empty input case.
3644 # Caller handles empty input case.
3643 assert insize > 0
3645 assert insize > 0
3644
3646
3645 if insize < 50:
3647 if insize < 50:
3646 return None
3648 return None
3647
3649
3648 elif insize <= 1000000:
3650 elif insize <= 1000000:
3649 compressed = self._cctx.compress(data)
3651 compressed = self._cctx.compress(data)
3650 if len(compressed) < insize:
3652 if len(compressed) < insize:
3651 return compressed
3653 return compressed
3652 return None
3654 return None
3653 else:
3655 else:
3654 z = self._cctx.compressobj()
3656 z = self._cctx.compressobj()
3655 chunks = []
3657 chunks = []
3656 pos = 0
3658 pos = 0
3657 while pos < insize:
3659 while pos < insize:
3658 pos2 = pos + self._compinsize
3660 pos2 = pos + self._compinsize
3659 chunk = z.compress(data[pos:pos2])
3661 chunk = z.compress(data[pos:pos2])
3660 if chunk:
3662 if chunk:
3661 chunks.append(chunk)
3663 chunks.append(chunk)
3662 pos = pos2
3664 pos = pos2
3663 chunks.append(z.flush())
3665 chunks.append(z.flush())
3664
3666
3665 if sum(map(len, chunks)) < insize:
3667 if sum(map(len, chunks)) < insize:
3666 return ''.join(chunks)
3668 return ''.join(chunks)
3667 return None
3669 return None
3668
3670
3669 def decompress(self, data):
3671 def decompress(self, data):
3670 insize = len(data)
3672 insize = len(data)
3671
3673
3672 try:
3674 try:
3673 # This was measured to be faster than other streaming
3675 # This was measured to be faster than other streaming
3674 # decompressors.
3676 # decompressors.
3675 dobj = self._dctx.decompressobj()
3677 dobj = self._dctx.decompressobj()
3676 chunks = []
3678 chunks = []
3677 pos = 0
3679 pos = 0
3678 while pos < insize:
3680 while pos < insize:
3679 pos2 = pos + self._decompinsize
3681 pos2 = pos + self._decompinsize
3680 chunk = dobj.decompress(data[pos:pos2])
3682 chunk = dobj.decompress(data[pos:pos2])
3681 if chunk:
3683 if chunk:
3682 chunks.append(chunk)
3684 chunks.append(chunk)
3683 pos = pos2
3685 pos = pos2
3684 # Frame should be exhausted, so no finish() API.
3686 # Frame should be exhausted, so no finish() API.
3685
3687
3686 return ''.join(chunks)
3688 return ''.join(chunks)
3687 except Exception as e:
3689 except Exception as e:
3688 raise error.RevlogError(_('revlog decompress error: %s') %
3690 raise error.RevlogError(_('revlog decompress error: %s') %
3689 str(e))
3691 str(e))
3690
3692
3691 def revlogcompressor(self, opts=None):
3693 def revlogcompressor(self, opts=None):
3692 opts = opts or {}
3694 opts = opts or {}
3693 return self.zstdrevlogcompressor(self._module,
3695 return self.zstdrevlogcompressor(self._module,
3694 level=opts.get('level', 3))
3696 level=opts.get('level', 3))
3695
3697
3696 compengines.register(_zstdengine())
3698 compengines.register(_zstdengine())
3697
3699
3698 def bundlecompressiontopics():
3700 def bundlecompressiontopics():
3699 """Obtains a list of available bundle compressions for use in help."""
3701 """Obtains a list of available bundle compressions for use in help."""
3700 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3702 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3701 items = {}
3703 items = {}
3702
3704
3703 # We need to format the docstring. So use a dummy object/type to hold it
3705 # We need to format the docstring. So use a dummy object/type to hold it
3704 # rather than mutating the original.
3706 # rather than mutating the original.
3705 class docobject(object):
3707 class docobject(object):
3706 pass
3708 pass
3707
3709
3708 for name in compengines:
3710 for name in compengines:
3709 engine = compengines[name]
3711 engine = compengines[name]
3710
3712
3711 if not engine.available():
3713 if not engine.available():
3712 continue
3714 continue
3713
3715
3714 bt = engine.bundletype()
3716 bt = engine.bundletype()
3715 if not bt or not bt[0]:
3717 if not bt or not bt[0]:
3716 continue
3718 continue
3717
3719
3718 doc = pycompat.sysstr('``%s``\n %s') % (
3720 doc = pycompat.sysstr('``%s``\n %s') % (
3719 bt[0], engine.bundletype.__doc__)
3721 bt[0], engine.bundletype.__doc__)
3720
3722
3721 value = docobject()
3723 value = docobject()
3722 value.__doc__ = doc
3724 value.__doc__ = doc
3723
3725
3724 items[bt[0]] = value
3726 items[bt[0]] = value
3725
3727
3726 return items
3728 return items
3727
3729
3728 # convenient shortcut
3730 # convenient shortcut
3729 dst = debugstacktrace
3731 dst = debugstacktrace
@@ -1,475 +1,477 b''
1 # windows.py - Windows utility function implementations for Mercurial
1 # windows.py - Windows utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import msvcrt
11 import msvcrt
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15 import sys
15 import sys
16
16
17 from .i18n import _
17 from .i18n import _
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 osutil,
20 policy,
21 pycompat,
21 pycompat,
22 win32,
22 win32,
23 )
23 )
24
24
25 try:
25 try:
26 import _winreg as winreg
26 import _winreg as winreg
27 winreg.CloseKey
27 winreg.CloseKey
28 except ImportError:
28 except ImportError:
29 import winreg
29 import winreg
30
30
31 osutil = policy.importmod(r'osutil')
32
31 executablepath = win32.executablepath
33 executablepath = win32.executablepath
32 getuser = win32.getuser
34 getuser = win32.getuser
33 hidewindow = win32.hidewindow
35 hidewindow = win32.hidewindow
34 makedir = win32.makedir
36 makedir = win32.makedir
35 nlinks = win32.nlinks
37 nlinks = win32.nlinks
36 oslink = win32.oslink
38 oslink = win32.oslink
37 samedevice = win32.samedevice
39 samedevice = win32.samedevice
38 samefile = win32.samefile
40 samefile = win32.samefile
39 setsignalhandler = win32.setsignalhandler
41 setsignalhandler = win32.setsignalhandler
40 spawndetached = win32.spawndetached
42 spawndetached = win32.spawndetached
41 split = os.path.split
43 split = os.path.split
42 testpid = win32.testpid
44 testpid = win32.testpid
43 unlink = win32.unlink
45 unlink = win32.unlink
44
46
45 umask = 0o022
47 umask = 0o022
46
48
47 class mixedfilemodewrapper(object):
49 class mixedfilemodewrapper(object):
48 """Wraps a file handle when it is opened in read/write mode.
50 """Wraps a file handle when it is opened in read/write mode.
49
51
50 fopen() and fdopen() on Windows have a specific-to-Windows requirement
52 fopen() and fdopen() on Windows have a specific-to-Windows requirement
51 that files opened with mode r+, w+, or a+ make a call to a file positioning
53 that files opened with mode r+, w+, or a+ make a call to a file positioning
52 function when switching between reads and writes. Without this extra call,
54 function when switching between reads and writes. Without this extra call,
53 Python will raise a not very intuitive "IOError: [Errno 0] Error."
55 Python will raise a not very intuitive "IOError: [Errno 0] Error."
54
56
55 This class wraps posixfile instances when the file is opened in read/write
57 This class wraps posixfile instances when the file is opened in read/write
56 mode and automatically adds checks or inserts appropriate file positioning
58 mode and automatically adds checks or inserts appropriate file positioning
57 calls when necessary.
59 calls when necessary.
58 """
60 """
59 OPNONE = 0
61 OPNONE = 0
60 OPREAD = 1
62 OPREAD = 1
61 OPWRITE = 2
63 OPWRITE = 2
62
64
63 def __init__(self, fp):
65 def __init__(self, fp):
64 object.__setattr__(self, r'_fp', fp)
66 object.__setattr__(self, r'_fp', fp)
65 object.__setattr__(self, r'_lastop', 0)
67 object.__setattr__(self, r'_lastop', 0)
66
68
67 def __enter__(self):
69 def __enter__(self):
68 return self._fp.__enter__()
70 return self._fp.__enter__()
69
71
70 def __exit__(self, exc_type, exc_val, exc_tb):
72 def __exit__(self, exc_type, exc_val, exc_tb):
71 self._fp.__exit__(exc_type, exc_val, exc_tb)
73 self._fp.__exit__(exc_type, exc_val, exc_tb)
72
74
73 def __getattr__(self, name):
75 def __getattr__(self, name):
74 return getattr(self._fp, name)
76 return getattr(self._fp, name)
75
77
76 def __setattr__(self, name, value):
78 def __setattr__(self, name, value):
77 return self._fp.__setattr__(name, value)
79 return self._fp.__setattr__(name, value)
78
80
79 def _noopseek(self):
81 def _noopseek(self):
80 self._fp.seek(0, os.SEEK_CUR)
82 self._fp.seek(0, os.SEEK_CUR)
81
83
82 def seek(self, *args, **kwargs):
84 def seek(self, *args, **kwargs):
83 object.__setattr__(self, r'_lastop', self.OPNONE)
85 object.__setattr__(self, r'_lastop', self.OPNONE)
84 return self._fp.seek(*args, **kwargs)
86 return self._fp.seek(*args, **kwargs)
85
87
86 def write(self, d):
88 def write(self, d):
87 if self._lastop == self.OPREAD:
89 if self._lastop == self.OPREAD:
88 self._noopseek()
90 self._noopseek()
89
91
90 object.__setattr__(self, r'_lastop', self.OPWRITE)
92 object.__setattr__(self, r'_lastop', self.OPWRITE)
91 return self._fp.write(d)
93 return self._fp.write(d)
92
94
93 def writelines(self, *args, **kwargs):
95 def writelines(self, *args, **kwargs):
94 if self._lastop == self.OPREAD:
96 if self._lastop == self.OPREAD:
95 self._noopeseek()
97 self._noopeseek()
96
98
97 object.__setattr__(self, r'_lastop', self.OPWRITE)
99 object.__setattr__(self, r'_lastop', self.OPWRITE)
98 return self._fp.writelines(*args, **kwargs)
100 return self._fp.writelines(*args, **kwargs)
99
101
100 def read(self, *args, **kwargs):
102 def read(self, *args, **kwargs):
101 if self._lastop == self.OPWRITE:
103 if self._lastop == self.OPWRITE:
102 self._noopseek()
104 self._noopseek()
103
105
104 object.__setattr__(self, r'_lastop', self.OPREAD)
106 object.__setattr__(self, r'_lastop', self.OPREAD)
105 return self._fp.read(*args, **kwargs)
107 return self._fp.read(*args, **kwargs)
106
108
107 def readline(self, *args, **kwargs):
109 def readline(self, *args, **kwargs):
108 if self._lastop == self.OPWRITE:
110 if self._lastop == self.OPWRITE:
109 self._noopseek()
111 self._noopseek()
110
112
111 object.__setattr__(self, r'_lastop', self.OPREAD)
113 object.__setattr__(self, r'_lastop', self.OPREAD)
112 return self._fp.readline(*args, **kwargs)
114 return self._fp.readline(*args, **kwargs)
113
115
114 def readlines(self, *args, **kwargs):
116 def readlines(self, *args, **kwargs):
115 if self._lastop == self.OPWRITE:
117 if self._lastop == self.OPWRITE:
116 self._noopseek()
118 self._noopseek()
117
119
118 object.__setattr__(self, r'_lastop', self.OPREAD)
120 object.__setattr__(self, r'_lastop', self.OPREAD)
119 return self._fp.readlines(*args, **kwargs)
121 return self._fp.readlines(*args, **kwargs)
120
122
121 def posixfile(name, mode='r', buffering=-1):
123 def posixfile(name, mode='r', buffering=-1):
122 '''Open a file with even more POSIX-like semantics'''
124 '''Open a file with even more POSIX-like semantics'''
123 try:
125 try:
124 fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
126 fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
125
127
126 # The position when opening in append mode is implementation defined, so
128 # The position when opening in append mode is implementation defined, so
127 # make it consistent with other platforms, which position at EOF.
129 # make it consistent with other platforms, which position at EOF.
128 if 'a' in mode:
130 if 'a' in mode:
129 fp.seek(0, os.SEEK_END)
131 fp.seek(0, os.SEEK_END)
130
132
131 if '+' in mode:
133 if '+' in mode:
132 return mixedfilemodewrapper(fp)
134 return mixedfilemodewrapper(fp)
133
135
134 return fp
136 return fp
135 except WindowsError as err:
137 except WindowsError as err:
136 # convert to a friendlier exception
138 # convert to a friendlier exception
137 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
139 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
138
140
139 # may be wrapped by win32mbcs extension
141 # may be wrapped by win32mbcs extension
140 listdir = osutil.listdir
142 listdir = osutil.listdir
141
143
142 class winstdout(object):
144 class winstdout(object):
143 '''stdout on windows misbehaves if sent through a pipe'''
145 '''stdout on windows misbehaves if sent through a pipe'''
144
146
145 def __init__(self, fp):
147 def __init__(self, fp):
146 self.fp = fp
148 self.fp = fp
147
149
148 def __getattr__(self, key):
150 def __getattr__(self, key):
149 return getattr(self.fp, key)
151 return getattr(self.fp, key)
150
152
151 def close(self):
153 def close(self):
152 try:
154 try:
153 self.fp.close()
155 self.fp.close()
154 except IOError:
156 except IOError:
155 pass
157 pass
156
158
157 def write(self, s):
159 def write(self, s):
158 try:
160 try:
159 # This is workaround for "Not enough space" error on
161 # This is workaround for "Not enough space" error on
160 # writing large size of data to console.
162 # writing large size of data to console.
161 limit = 16000
163 limit = 16000
162 l = len(s)
164 l = len(s)
163 start = 0
165 start = 0
164 self.softspace = 0
166 self.softspace = 0
165 while start < l:
167 while start < l:
166 end = start + limit
168 end = start + limit
167 self.fp.write(s[start:end])
169 self.fp.write(s[start:end])
168 start = end
170 start = end
169 except IOError as inst:
171 except IOError as inst:
170 if inst.errno != 0:
172 if inst.errno != 0:
171 raise
173 raise
172 self.close()
174 self.close()
173 raise IOError(errno.EPIPE, 'Broken pipe')
175 raise IOError(errno.EPIPE, 'Broken pipe')
174
176
175 def flush(self):
177 def flush(self):
176 try:
178 try:
177 return self.fp.flush()
179 return self.fp.flush()
178 except IOError as inst:
180 except IOError as inst:
179 if inst.errno != errno.EINVAL:
181 if inst.errno != errno.EINVAL:
180 raise
182 raise
181 self.close()
183 self.close()
182 raise IOError(errno.EPIPE, 'Broken pipe')
184 raise IOError(errno.EPIPE, 'Broken pipe')
183
185
184 def _is_win_9x():
186 def _is_win_9x():
185 '''return true if run on windows 95, 98 or me.'''
187 '''return true if run on windows 95, 98 or me.'''
186 try:
188 try:
187 return sys.getwindowsversion()[3] == 1
189 return sys.getwindowsversion()[3] == 1
188 except AttributeError:
190 except AttributeError:
189 return 'command' in encoding.environ.get('comspec', '')
191 return 'command' in encoding.environ.get('comspec', '')
190
192
191 def openhardlinks():
193 def openhardlinks():
192 return not _is_win_9x()
194 return not _is_win_9x()
193
195
194 def parsepatchoutput(output_line):
196 def parsepatchoutput(output_line):
195 """parses the output produced by patch and returns the filename"""
197 """parses the output produced by patch and returns the filename"""
196 pf = output_line[14:]
198 pf = output_line[14:]
197 if pf[0] == '`':
199 if pf[0] == '`':
198 pf = pf[1:-1] # Remove the quotes
200 pf = pf[1:-1] # Remove the quotes
199 return pf
201 return pf
200
202
201 def sshargs(sshcmd, host, user, port):
203 def sshargs(sshcmd, host, user, port):
202 '''Build argument list for ssh or Plink'''
204 '''Build argument list for ssh or Plink'''
203 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
205 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
204 args = user and ("%s@%s" % (user, host)) or host
206 args = user and ("%s@%s" % (user, host)) or host
205 return port and ("%s %s %s" % (args, pflag, port)) or args
207 return port and ("%s %s %s" % (args, pflag, port)) or args
206
208
207 def setflags(f, l, x):
209 def setflags(f, l, x):
208 pass
210 pass
209
211
210 def copymode(src, dst, mode=None):
212 def copymode(src, dst, mode=None):
211 pass
213 pass
212
214
213 def checkexec(path):
215 def checkexec(path):
214 return False
216 return False
215
217
216 def checklink(path):
218 def checklink(path):
217 return False
219 return False
218
220
219 def setbinary(fd):
221 def setbinary(fd):
220 # When run without console, pipes may expose invalid
222 # When run without console, pipes may expose invalid
221 # fileno(), usually set to -1.
223 # fileno(), usually set to -1.
222 fno = getattr(fd, 'fileno', None)
224 fno = getattr(fd, 'fileno', None)
223 if fno is not None and fno() >= 0:
225 if fno is not None and fno() >= 0:
224 msvcrt.setmode(fno(), os.O_BINARY)
226 msvcrt.setmode(fno(), os.O_BINARY)
225
227
226 def pconvert(path):
228 def pconvert(path):
227 return path.replace(pycompat.ossep, '/')
229 return path.replace(pycompat.ossep, '/')
228
230
229 def localpath(path):
231 def localpath(path):
230 return path.replace('/', '\\')
232 return path.replace('/', '\\')
231
233
232 def normpath(path):
234 def normpath(path):
233 return pconvert(os.path.normpath(path))
235 return pconvert(os.path.normpath(path))
234
236
235 def normcase(path):
237 def normcase(path):
236 return encoding.upper(path) # NTFS compares via upper()
238 return encoding.upper(path) # NTFS compares via upper()
237
239
238 # see posix.py for definitions
240 # see posix.py for definitions
239 normcasespec = encoding.normcasespecs.upper
241 normcasespec = encoding.normcasespecs.upper
240 normcasefallback = encoding.upperfallback
242 normcasefallback = encoding.upperfallback
241
243
242 def samestat(s1, s2):
244 def samestat(s1, s2):
243 return False
245 return False
244
246
245 # A sequence of backslashes is special iff it precedes a double quote:
247 # A sequence of backslashes is special iff it precedes a double quote:
246 # - if there's an even number of backslashes, the double quote is not
248 # - if there's an even number of backslashes, the double quote is not
247 # quoted (i.e. it ends the quoted region)
249 # quoted (i.e. it ends the quoted region)
248 # - if there's an odd number of backslashes, the double quote is quoted
250 # - if there's an odd number of backslashes, the double quote is quoted
249 # - in both cases, every pair of backslashes is unquoted into a single
251 # - in both cases, every pair of backslashes is unquoted into a single
250 # backslash
252 # backslash
251 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
253 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
252 # So, to quote a string, we must surround it in double quotes, double
254 # So, to quote a string, we must surround it in double quotes, double
253 # the number of backslashes that precede double quotes and add another
255 # the number of backslashes that precede double quotes and add another
254 # backslash before every double quote (being careful with the double
256 # backslash before every double quote (being careful with the double
255 # quote we've appended to the end)
257 # quote we've appended to the end)
256 _quotere = None
258 _quotere = None
257 _needsshellquote = None
259 _needsshellquote = None
258 def shellquote(s):
260 def shellquote(s):
259 r"""
261 r"""
260 >>> shellquote(r'C:\Users\xyz')
262 >>> shellquote(r'C:\Users\xyz')
261 '"C:\\Users\\xyz"'
263 '"C:\\Users\\xyz"'
262 >>> shellquote(r'C:\Users\xyz/mixed')
264 >>> shellquote(r'C:\Users\xyz/mixed')
263 '"C:\\Users\\xyz/mixed"'
265 '"C:\\Users\\xyz/mixed"'
264 >>> # Would be safe not to quote too, since it is all double backslashes
266 >>> # Would be safe not to quote too, since it is all double backslashes
265 >>> shellquote(r'C:\\Users\\xyz')
267 >>> shellquote(r'C:\\Users\\xyz')
266 '"C:\\\\Users\\\\xyz"'
268 '"C:\\\\Users\\\\xyz"'
267 >>> # But this must be quoted
269 >>> # But this must be quoted
268 >>> shellquote(r'C:\\Users\\xyz/abc')
270 >>> shellquote(r'C:\\Users\\xyz/abc')
269 '"C:\\\\Users\\\\xyz/abc"'
271 '"C:\\\\Users\\\\xyz/abc"'
270 """
272 """
271 global _quotere
273 global _quotere
272 if _quotere is None:
274 if _quotere is None:
273 _quotere = re.compile(r'(\\*)("|\\$)')
275 _quotere = re.compile(r'(\\*)("|\\$)')
274 global _needsshellquote
276 global _needsshellquote
275 if _needsshellquote is None:
277 if _needsshellquote is None:
276 # ":" is also treated as "safe character", because it is used as a part
278 # ":" is also treated as "safe character", because it is used as a part
277 # of path name on Windows. "\" is also part of a path name, but isn't
279 # of path name on Windows. "\" is also part of a path name, but isn't
278 # safe because shlex.split() (kind of) treats it as an escape char and
280 # safe because shlex.split() (kind of) treats it as an escape char and
279 # drops it. It will leave the next character, even if it is another
281 # drops it. It will leave the next character, even if it is another
280 # "\".
282 # "\".
281 _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
283 _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
282 if s and not _needsshellquote(s) and not _quotere.search(s):
284 if s and not _needsshellquote(s) and not _quotere.search(s):
283 # "s" shouldn't have to be quoted
285 # "s" shouldn't have to be quoted
284 return s
286 return s
285 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
287 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
286
288
287 def quotecommand(cmd):
289 def quotecommand(cmd):
288 """Build a command string suitable for os.popen* calls."""
290 """Build a command string suitable for os.popen* calls."""
289 if sys.version_info < (2, 7, 1):
291 if sys.version_info < (2, 7, 1):
290 # Python versions since 2.7.1 do this extra quoting themselves
292 # Python versions since 2.7.1 do this extra quoting themselves
291 return '"' + cmd + '"'
293 return '"' + cmd + '"'
292 return cmd
294 return cmd
293
295
294 def popen(command, mode='r'):
296 def popen(command, mode='r'):
295 # Work around "popen spawned process may not write to stdout
297 # Work around "popen spawned process may not write to stdout
296 # under windows"
298 # under windows"
297 # http://bugs.python.org/issue1366
299 # http://bugs.python.org/issue1366
298 command += " 2> %s" % os.devnull
300 command += " 2> %s" % os.devnull
299 return os.popen(quotecommand(command), mode)
301 return os.popen(quotecommand(command), mode)
300
302
301 def explainexit(code):
303 def explainexit(code):
302 return _("exited with status %d") % code, code
304 return _("exited with status %d") % code, code
303
305
304 # if you change this stub into a real check, please try to implement the
306 # if you change this stub into a real check, please try to implement the
305 # username and groupname functions above, too.
307 # username and groupname functions above, too.
306 def isowner(st):
308 def isowner(st):
307 return True
309 return True
308
310
309 def findexe(command):
311 def findexe(command):
310 '''Find executable for command searching like cmd.exe does.
312 '''Find executable for command searching like cmd.exe does.
311 If command is a basename then PATH is searched for command.
313 If command is a basename then PATH is searched for command.
312 PATH isn't searched if command is an absolute or relative path.
314 PATH isn't searched if command is an absolute or relative path.
313 An extension from PATHEXT is found and added if not present.
315 An extension from PATHEXT is found and added if not present.
314 If command isn't found None is returned.'''
316 If command isn't found None is returned.'''
315 pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
317 pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
316 pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
318 pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
317 if os.path.splitext(command)[1].lower() in pathexts:
319 if os.path.splitext(command)[1].lower() in pathexts:
318 pathexts = ['']
320 pathexts = ['']
319
321
320 def findexisting(pathcommand):
322 def findexisting(pathcommand):
321 'Will append extension (if needed) and return existing file'
323 'Will append extension (if needed) and return existing file'
322 for ext in pathexts:
324 for ext in pathexts:
323 executable = pathcommand + ext
325 executable = pathcommand + ext
324 if os.path.exists(executable):
326 if os.path.exists(executable):
325 return executable
327 return executable
326 return None
328 return None
327
329
328 if pycompat.ossep in command:
330 if pycompat.ossep in command:
329 return findexisting(command)
331 return findexisting(command)
330
332
331 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
333 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
332 executable = findexisting(os.path.join(path, command))
334 executable = findexisting(os.path.join(path, command))
333 if executable is not None:
335 if executable is not None:
334 return executable
336 return executable
335 return findexisting(os.path.expanduser(os.path.expandvars(command)))
337 return findexisting(os.path.expanduser(os.path.expandvars(command)))
336
338
337 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
339 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
338
340
339 def statfiles(files):
341 def statfiles(files):
340 '''Stat each file in files. Yield each stat, or None if a file
342 '''Stat each file in files. Yield each stat, or None if a file
341 does not exist or has a type we don't care about.
343 does not exist or has a type we don't care about.
342
344
343 Cluster and cache stat per directory to minimize number of OS stat calls.'''
345 Cluster and cache stat per directory to minimize number of OS stat calls.'''
344 dircache = {} # dirname -> filename -> status | None if file does not exist
346 dircache = {} # dirname -> filename -> status | None if file does not exist
345 getkind = stat.S_IFMT
347 getkind = stat.S_IFMT
346 for nf in files:
348 for nf in files:
347 nf = normcase(nf)
349 nf = normcase(nf)
348 dir, base = os.path.split(nf)
350 dir, base = os.path.split(nf)
349 if not dir:
351 if not dir:
350 dir = '.'
352 dir = '.'
351 cache = dircache.get(dir, None)
353 cache = dircache.get(dir, None)
352 if cache is None:
354 if cache is None:
353 try:
355 try:
354 dmap = dict([(normcase(n), s)
356 dmap = dict([(normcase(n), s)
355 for n, k, s in listdir(dir, True)
357 for n, k, s in listdir(dir, True)
356 if getkind(s.st_mode) in _wantedkinds])
358 if getkind(s.st_mode) in _wantedkinds])
357 except OSError as err:
359 except OSError as err:
358 # Python >= 2.5 returns ENOENT and adds winerror field
360 # Python >= 2.5 returns ENOENT and adds winerror field
359 # EINVAL is raised if dir is not a directory.
361 # EINVAL is raised if dir is not a directory.
360 if err.errno not in (errno.ENOENT, errno.EINVAL,
362 if err.errno not in (errno.ENOENT, errno.EINVAL,
361 errno.ENOTDIR):
363 errno.ENOTDIR):
362 raise
364 raise
363 dmap = {}
365 dmap = {}
364 cache = dircache.setdefault(dir, dmap)
366 cache = dircache.setdefault(dir, dmap)
365 yield cache.get(base, None)
367 yield cache.get(base, None)
366
368
367 def username(uid=None):
369 def username(uid=None):
368 """Return the name of the user with the given uid.
370 """Return the name of the user with the given uid.
369
371
370 If uid is None, return the name of the current user."""
372 If uid is None, return the name of the current user."""
371 return None
373 return None
372
374
373 def groupname(gid=None):
375 def groupname(gid=None):
374 """Return the name of the group with the given gid.
376 """Return the name of the group with the given gid.
375
377
376 If gid is None, return the name of the current group."""
378 If gid is None, return the name of the current group."""
377 return None
379 return None
378
380
379 def removedirs(name):
381 def removedirs(name):
380 """special version of os.removedirs that does not remove symlinked
382 """special version of os.removedirs that does not remove symlinked
381 directories or junction points if they actually contain files"""
383 directories or junction points if they actually contain files"""
382 if listdir(name):
384 if listdir(name):
383 return
385 return
384 os.rmdir(name)
386 os.rmdir(name)
385 head, tail = os.path.split(name)
387 head, tail = os.path.split(name)
386 if not tail:
388 if not tail:
387 head, tail = os.path.split(head)
389 head, tail = os.path.split(head)
388 while head and tail:
390 while head and tail:
389 try:
391 try:
390 if listdir(head):
392 if listdir(head):
391 return
393 return
392 os.rmdir(head)
394 os.rmdir(head)
393 except (ValueError, OSError):
395 except (ValueError, OSError):
394 break
396 break
395 head, tail = os.path.split(head)
397 head, tail = os.path.split(head)
396
398
397 def rename(src, dst):
399 def rename(src, dst):
398 '''atomically rename file src to dst, replacing dst if it exists'''
400 '''atomically rename file src to dst, replacing dst if it exists'''
399 try:
401 try:
400 os.rename(src, dst)
402 os.rename(src, dst)
401 except OSError as e:
403 except OSError as e:
402 if e.errno != errno.EEXIST:
404 if e.errno != errno.EEXIST:
403 raise
405 raise
404 unlink(dst)
406 unlink(dst)
405 os.rename(src, dst)
407 os.rename(src, dst)
406
408
407 def gethgcmd():
409 def gethgcmd():
408 return [sys.executable] + sys.argv[:1]
410 return [sys.executable] + sys.argv[:1]
409
411
410 def groupmembers(name):
412 def groupmembers(name):
411 # Don't support groups on Windows for now
413 # Don't support groups on Windows for now
412 raise KeyError
414 raise KeyError
413
415
414 def isexec(f):
416 def isexec(f):
415 return False
417 return False
416
418
417 class cachestat(object):
419 class cachestat(object):
418 def __init__(self, path):
420 def __init__(self, path):
419 pass
421 pass
420
422
421 def cacheable(self):
423 def cacheable(self):
422 return False
424 return False
423
425
424 def lookupreg(key, valname=None, scope=None):
426 def lookupreg(key, valname=None, scope=None):
425 ''' Look up a key/value name in the Windows registry.
427 ''' Look up a key/value name in the Windows registry.
426
428
427 valname: value name. If unspecified, the default value for the key
429 valname: value name. If unspecified, the default value for the key
428 is used.
430 is used.
429 scope: optionally specify scope for registry lookup, this can be
431 scope: optionally specify scope for registry lookup, this can be
430 a sequence of scopes to look up in order. Default (CURRENT_USER,
432 a sequence of scopes to look up in order. Default (CURRENT_USER,
431 LOCAL_MACHINE).
433 LOCAL_MACHINE).
432 '''
434 '''
433 if scope is None:
435 if scope is None:
434 scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
436 scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
435 elif not isinstance(scope, (list, tuple)):
437 elif not isinstance(scope, (list, tuple)):
436 scope = (scope,)
438 scope = (scope,)
437 for s in scope:
439 for s in scope:
438 try:
440 try:
439 val = winreg.QueryValueEx(winreg.OpenKey(s, key), valname)[0]
441 val = winreg.QueryValueEx(winreg.OpenKey(s, key), valname)[0]
440 # never let a Unicode string escape into the wild
442 # never let a Unicode string escape into the wild
441 return encoding.unitolocal(val)
443 return encoding.unitolocal(val)
442 except EnvironmentError:
444 except EnvironmentError:
443 pass
445 pass
444
446
445 expandglobs = True
447 expandglobs = True
446
448
447 def statislink(st):
449 def statislink(st):
448 '''check whether a stat result is a symlink'''
450 '''check whether a stat result is a symlink'''
449 return False
451 return False
450
452
451 def statisexec(st):
453 def statisexec(st):
452 '''check whether a stat result is an executable file'''
454 '''check whether a stat result is an executable file'''
453 return False
455 return False
454
456
455 def poll(fds):
457 def poll(fds):
456 # see posix.py for description
458 # see posix.py for description
457 raise NotImplementedError()
459 raise NotImplementedError()
458
460
459 def readpipe(pipe):
461 def readpipe(pipe):
460 """Read all available data from a pipe."""
462 """Read all available data from a pipe."""
461 chunks = []
463 chunks = []
462 while True:
464 while True:
463 size = win32.peekpipe(pipe)
465 size = win32.peekpipe(pipe)
464 if not size:
466 if not size:
465 break
467 break
466
468
467 s = pipe.read(size)
469 s = pipe.read(size)
468 if not s:
470 if not s:
469 break
471 break
470 chunks.append(s)
472 chunks.append(s)
471
473
472 return ''.join(chunks)
474 return ''.join(chunks)
473
475
474 def bindunixsocket(sock, path):
476 def bindunixsocket(sock, path):
475 raise NotImplementedError('unsupported platform')
477 raise NotImplementedError('unsupported platform')
@@ -1,802 +1,802 b''
1 #
1 #
2 # This is the mercurial setup script.
2 # This is the mercurial setup script.
3 #
3 #
4 # 'python setup.py install', or
4 # 'python setup.py install', or
5 # 'python setup.py --help' for more options
5 # 'python setup.py --help' for more options
6
6
7 import sys, platform
7 import sys, platform
8 if sys.version_info < (2, 7, 0, 'final'):
8 if sys.version_info < (2, 7, 0, 'final'):
9 raise SystemExit('Mercurial requires Python 2.7 or later.')
9 raise SystemExit('Mercurial requires Python 2.7 or later.')
10
10
11 if sys.version_info[0] >= 3:
11 if sys.version_info[0] >= 3:
12 printf = eval('print')
12 printf = eval('print')
13 libdir_escape = 'unicode_escape'
13 libdir_escape = 'unicode_escape'
14 else:
14 else:
15 libdir_escape = 'string_escape'
15 libdir_escape = 'string_escape'
16 def printf(*args, **kwargs):
16 def printf(*args, **kwargs):
17 f = kwargs.get('file', sys.stdout)
17 f = kwargs.get('file', sys.stdout)
18 end = kwargs.get('end', '\n')
18 end = kwargs.get('end', '\n')
19 f.write(b' '.join(args) + end)
19 f.write(b' '.join(args) + end)
20
20
21 # Solaris Python packaging brain damage
21 # Solaris Python packaging brain damage
22 try:
22 try:
23 import hashlib
23 import hashlib
24 sha = hashlib.sha1()
24 sha = hashlib.sha1()
25 except ImportError:
25 except ImportError:
26 try:
26 try:
27 import sha
27 import sha
28 sha.sha # silence unused import warning
28 sha.sha # silence unused import warning
29 except ImportError:
29 except ImportError:
30 raise SystemExit(
30 raise SystemExit(
31 "Couldn't import standard hashlib (incomplete Python install).")
31 "Couldn't import standard hashlib (incomplete Python install).")
32
32
33 try:
33 try:
34 import zlib
34 import zlib
35 zlib.compressobj # silence unused import warning
35 zlib.compressobj # silence unused import warning
36 except ImportError:
36 except ImportError:
37 raise SystemExit(
37 raise SystemExit(
38 "Couldn't import standard zlib (incomplete Python install).")
38 "Couldn't import standard zlib (incomplete Python install).")
39
39
40 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
40 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
41 isironpython = False
41 isironpython = False
42 try:
42 try:
43 isironpython = (platform.python_implementation()
43 isironpython = (platform.python_implementation()
44 .lower().find("ironpython") != -1)
44 .lower().find("ironpython") != -1)
45 except AttributeError:
45 except AttributeError:
46 pass
46 pass
47
47
48 if isironpython:
48 if isironpython:
49 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
49 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
50 else:
50 else:
51 try:
51 try:
52 import bz2
52 import bz2
53 bz2.BZ2Compressor # silence unused import warning
53 bz2.BZ2Compressor # silence unused import warning
54 except ImportError:
54 except ImportError:
55 raise SystemExit(
55 raise SystemExit(
56 "Couldn't import standard bz2 (incomplete Python install).")
56 "Couldn't import standard bz2 (incomplete Python install).")
57
57
58 ispypy = "PyPy" in sys.version
58 ispypy = "PyPy" in sys.version
59
59
60 import ctypes
60 import ctypes
61 import os, stat, subprocess, time
61 import os, stat, subprocess, time
62 import re
62 import re
63 import shutil
63 import shutil
64 import tempfile
64 import tempfile
65 from distutils import log
65 from distutils import log
66 # We have issues with setuptools on some platforms and builders. Until
66 # We have issues with setuptools on some platforms and builders. Until
67 # those are resolved, setuptools is opt-in except for platforms where
67 # those are resolved, setuptools is opt-in except for platforms where
68 # we don't have issues.
68 # we don't have issues.
69 if os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ:
69 if os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ:
70 from setuptools import setup
70 from setuptools import setup
71 else:
71 else:
72 from distutils.core import setup
72 from distutils.core import setup
73 from distutils.ccompiler import new_compiler
73 from distutils.ccompiler import new_compiler
74 from distutils.core import Command, Extension
74 from distutils.core import Command, Extension
75 from distutils.dist import Distribution
75 from distutils.dist import Distribution
76 from distutils.command.build import build
76 from distutils.command.build import build
77 from distutils.command.build_ext import build_ext
77 from distutils.command.build_ext import build_ext
78 from distutils.command.build_py import build_py
78 from distutils.command.build_py import build_py
79 from distutils.command.build_scripts import build_scripts
79 from distutils.command.build_scripts import build_scripts
80 from distutils.command.install_lib import install_lib
80 from distutils.command.install_lib import install_lib
81 from distutils.command.install_scripts import install_scripts
81 from distutils.command.install_scripts import install_scripts
82 from distutils.spawn import spawn, find_executable
82 from distutils.spawn import spawn, find_executable
83 from distutils import file_util
83 from distutils import file_util
84 from distutils.errors import (
84 from distutils.errors import (
85 CCompilerError,
85 CCompilerError,
86 DistutilsError,
86 DistutilsError,
87 DistutilsExecError,
87 DistutilsExecError,
88 )
88 )
89 from distutils.sysconfig import get_python_inc, get_config_var
89 from distutils.sysconfig import get_python_inc, get_config_var
90 from distutils.version import StrictVersion
90 from distutils.version import StrictVersion
91
91
92 scripts = ['hg']
92 scripts = ['hg']
93 if os.name == 'nt':
93 if os.name == 'nt':
94 # We remove hg.bat if we are able to build hg.exe.
94 # We remove hg.bat if we are able to build hg.exe.
95 scripts.append('contrib/win32/hg.bat')
95 scripts.append('contrib/win32/hg.bat')
96
96
97 def cancompile(cc, code):
97 def cancompile(cc, code):
98 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
98 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
99 devnull = oldstderr = None
99 devnull = oldstderr = None
100 try:
100 try:
101 fname = os.path.join(tmpdir, 'testcomp.c')
101 fname = os.path.join(tmpdir, 'testcomp.c')
102 f = open(fname, 'w')
102 f = open(fname, 'w')
103 f.write(code)
103 f.write(code)
104 f.close()
104 f.close()
105 # Redirect stderr to /dev/null to hide any error messages
105 # Redirect stderr to /dev/null to hide any error messages
106 # from the compiler.
106 # from the compiler.
107 # This will have to be changed if we ever have to check
107 # This will have to be changed if we ever have to check
108 # for a function on Windows.
108 # for a function on Windows.
109 devnull = open('/dev/null', 'w')
109 devnull = open('/dev/null', 'w')
110 oldstderr = os.dup(sys.stderr.fileno())
110 oldstderr = os.dup(sys.stderr.fileno())
111 os.dup2(devnull.fileno(), sys.stderr.fileno())
111 os.dup2(devnull.fileno(), sys.stderr.fileno())
112 objects = cc.compile([fname], output_dir=tmpdir)
112 objects = cc.compile([fname], output_dir=tmpdir)
113 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
113 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
114 return True
114 return True
115 except Exception:
115 except Exception:
116 return False
116 return False
117 finally:
117 finally:
118 if oldstderr is not None:
118 if oldstderr is not None:
119 os.dup2(oldstderr, sys.stderr.fileno())
119 os.dup2(oldstderr, sys.stderr.fileno())
120 if devnull is not None:
120 if devnull is not None:
121 devnull.close()
121 devnull.close()
122 shutil.rmtree(tmpdir)
122 shutil.rmtree(tmpdir)
123
123
124 # simplified version of distutils.ccompiler.CCompiler.has_function
124 # simplified version of distutils.ccompiler.CCompiler.has_function
125 # that actually removes its temporary files.
125 # that actually removes its temporary files.
126 def hasfunction(cc, funcname):
126 def hasfunction(cc, funcname):
127 code = 'int main(void) { %s(); }\n' % funcname
127 code = 'int main(void) { %s(); }\n' % funcname
128 return cancompile(cc, code)
128 return cancompile(cc, code)
129
129
130 def hasheader(cc, headername):
130 def hasheader(cc, headername):
131 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
131 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
132 return cancompile(cc, code)
132 return cancompile(cc, code)
133
133
134 # py2exe needs to be installed to work
134 # py2exe needs to be installed to work
135 try:
135 try:
136 import py2exe
136 import py2exe
137 py2exe.Distribution # silence unused import warning
137 py2exe.Distribution # silence unused import warning
138 py2exeloaded = True
138 py2exeloaded = True
139 # import py2exe's patched Distribution class
139 # import py2exe's patched Distribution class
140 from distutils.core import Distribution
140 from distutils.core import Distribution
141 except ImportError:
141 except ImportError:
142 py2exeloaded = False
142 py2exeloaded = False
143
143
144 def runcmd(cmd, env):
144 def runcmd(cmd, env):
145 if (sys.platform == 'plan9'
145 if (sys.platform == 'plan9'
146 and (sys.version_info[0] == 2 and sys.version_info[1] < 7)):
146 and (sys.version_info[0] == 2 and sys.version_info[1] < 7)):
147 # subprocess kludge to work around issues in half-baked Python
147 # subprocess kludge to work around issues in half-baked Python
148 # ports, notably bichued/python:
148 # ports, notably bichued/python:
149 _, out, err = os.popen3(cmd)
149 _, out, err = os.popen3(cmd)
150 return str(out), str(err)
150 return str(out), str(err)
151 else:
151 else:
152 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
152 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
153 stderr=subprocess.PIPE, env=env)
153 stderr=subprocess.PIPE, env=env)
154 out, err = p.communicate()
154 out, err = p.communicate()
155 return out, err
155 return out, err
156
156
157 def runhg(cmd, env):
157 def runhg(cmd, env):
158 out, err = runcmd(cmd, env)
158 out, err = runcmd(cmd, env)
159 # If root is executing setup.py, but the repository is owned by
159 # If root is executing setup.py, but the repository is owned by
160 # another user (as in "sudo python setup.py install") we will get
160 # another user (as in "sudo python setup.py install") we will get
161 # trust warnings since the .hg/hgrc file is untrusted. That is
161 # trust warnings since the .hg/hgrc file is untrusted. That is
162 # fine, we don't want to load it anyway. Python may warn about
162 # fine, we don't want to load it anyway. Python may warn about
163 # a missing __init__.py in mercurial/locale, we also ignore that.
163 # a missing __init__.py in mercurial/locale, we also ignore that.
164 err = [e for e in err.splitlines()
164 err = [e for e in err.splitlines()
165 if not e.startswith(b'not trusting file') \
165 if not e.startswith(b'not trusting file') \
166 and not e.startswith(b'warning: Not importing') \
166 and not e.startswith(b'warning: Not importing') \
167 and not e.startswith(b'obsolete feature not enabled')]
167 and not e.startswith(b'obsolete feature not enabled')]
168 if err:
168 if err:
169 printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
169 printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
170 printf(b'\n'.join([b' ' + e for e in err]), file=sys.stderr)
170 printf(b'\n'.join([b' ' + e for e in err]), file=sys.stderr)
171 return ''
171 return ''
172 return out
172 return out
173
173
174 version = ''
174 version = ''
175
175
176 # Execute hg out of this directory with a custom environment which takes care
176 # Execute hg out of this directory with a custom environment which takes care
177 # to not use any hgrc files and do no localization.
177 # to not use any hgrc files and do no localization.
178 env = {'HGMODULEPOLICY': 'py',
178 env = {'HGMODULEPOLICY': 'py',
179 'HGRCPATH': '',
179 'HGRCPATH': '',
180 'LANGUAGE': 'C',
180 'LANGUAGE': 'C',
181 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
181 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
182 if 'LD_LIBRARY_PATH' in os.environ:
182 if 'LD_LIBRARY_PATH' in os.environ:
183 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
183 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
184 if 'SystemRoot' in os.environ:
184 if 'SystemRoot' in os.environ:
185 # Copy SystemRoot into the custom environment for Python 2.6
185 # Copy SystemRoot into the custom environment for Python 2.6
186 # under Windows. Otherwise, the subprocess will fail with
186 # under Windows. Otherwise, the subprocess will fail with
187 # error 0xc0150004. See: http://bugs.python.org/issue3440
187 # error 0xc0150004. See: http://bugs.python.org/issue3440
188 env['SystemRoot'] = os.environ['SystemRoot']
188 env['SystemRoot'] = os.environ['SystemRoot']
189
189
190 if os.path.isdir('.hg'):
190 if os.path.isdir('.hg'):
191 cmd = [sys.executable, 'hg', 'log', '-r', '.', '--template', '{tags}\n']
191 cmd = [sys.executable, 'hg', 'log', '-r', '.', '--template', '{tags}\n']
192 numerictags = [t for t in runhg(cmd, env).split() if t[0].isdigit()]
192 numerictags = [t for t in runhg(cmd, env).split() if t[0].isdigit()]
193 hgid = runhg([sys.executable, 'hg', 'id', '-i'], env).strip()
193 hgid = runhg([sys.executable, 'hg', 'id', '-i'], env).strip()
194 if numerictags: # tag(s) found
194 if numerictags: # tag(s) found
195 version = numerictags[-1]
195 version = numerictags[-1]
196 if hgid.endswith('+'): # propagate the dirty status to the tag
196 if hgid.endswith('+'): # propagate the dirty status to the tag
197 version += '+'
197 version += '+'
198 else: # no tag found
198 else: # no tag found
199 ltagcmd = [sys.executable, 'hg', 'parents', '--template',
199 ltagcmd = [sys.executable, 'hg', 'parents', '--template',
200 '{latesttag}']
200 '{latesttag}']
201 ltag = runhg(ltagcmd, env)
201 ltag = runhg(ltagcmd, env)
202 changessincecmd = [sys.executable, 'hg', 'log', '-T', 'x\n', '-r',
202 changessincecmd = [sys.executable, 'hg', 'log', '-T', 'x\n', '-r',
203 "only(.,'%s')" % ltag]
203 "only(.,'%s')" % ltag]
204 changessince = len(runhg(changessincecmd, env).splitlines())
204 changessince = len(runhg(changessincecmd, env).splitlines())
205 version = '%s+%s-%s' % (ltag, changessince, hgid)
205 version = '%s+%s-%s' % (ltag, changessince, hgid)
206 if version.endswith('+'):
206 if version.endswith('+'):
207 version += time.strftime('%Y%m%d')
207 version += time.strftime('%Y%m%d')
208 elif os.path.exists('.hg_archival.txt'):
208 elif os.path.exists('.hg_archival.txt'):
209 kw = dict([[t.strip() for t in l.split(':', 1)]
209 kw = dict([[t.strip() for t in l.split(':', 1)]
210 for l in open('.hg_archival.txt')])
210 for l in open('.hg_archival.txt')])
211 if 'tag' in kw:
211 if 'tag' in kw:
212 version = kw['tag']
212 version = kw['tag']
213 elif 'latesttag' in kw:
213 elif 'latesttag' in kw:
214 if 'changessincelatesttag' in kw:
214 if 'changessincelatesttag' in kw:
215 version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw
215 version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw
216 else:
216 else:
217 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
217 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
218 else:
218 else:
219 version = kw.get('node', '')[:12]
219 version = kw.get('node', '')[:12]
220
220
221 if version:
221 if version:
222 with open("mercurial/__version__.py", "w") as f:
222 with open("mercurial/__version__.py", "w") as f:
223 f.write('# this file is autogenerated by setup.py\n')
223 f.write('# this file is autogenerated by setup.py\n')
224 f.write('version = "%s"\n' % version)
224 f.write('version = "%s"\n' % version)
225
225
226 try:
226 try:
227 oldpolicy = os.environ.get('HGMODULEPOLICY', None)
227 oldpolicy = os.environ.get('HGMODULEPOLICY', None)
228 os.environ['HGMODULEPOLICY'] = 'py'
228 os.environ['HGMODULEPOLICY'] = 'py'
229 from mercurial import __version__
229 from mercurial import __version__
230 version = __version__.version
230 version = __version__.version
231 except ImportError:
231 except ImportError:
232 version = 'unknown'
232 version = 'unknown'
233 finally:
233 finally:
234 if oldpolicy is None:
234 if oldpolicy is None:
235 del os.environ['HGMODULEPOLICY']
235 del os.environ['HGMODULEPOLICY']
236 else:
236 else:
237 os.environ['HGMODULEPOLICY'] = oldpolicy
237 os.environ['HGMODULEPOLICY'] = oldpolicy
238
238
239 class hgbuild(build):
239 class hgbuild(build):
240 # Insert hgbuildmo first so that files in mercurial/locale/ are found
240 # Insert hgbuildmo first so that files in mercurial/locale/ are found
241 # when build_py is run next.
241 # when build_py is run next.
242 sub_commands = [('build_mo', None)] + build.sub_commands
242 sub_commands = [('build_mo', None)] + build.sub_commands
243
243
244 class hgbuildmo(build):
244 class hgbuildmo(build):
245
245
246 description = "build translations (.mo files)"
246 description = "build translations (.mo files)"
247
247
248 def run(self):
248 def run(self):
249 if not find_executable('msgfmt'):
249 if not find_executable('msgfmt'):
250 self.warn("could not find msgfmt executable, no translations "
250 self.warn("could not find msgfmt executable, no translations "
251 "will be built")
251 "will be built")
252 return
252 return
253
253
254 podir = 'i18n'
254 podir = 'i18n'
255 if not os.path.isdir(podir):
255 if not os.path.isdir(podir):
256 self.warn("could not find %s/ directory" % podir)
256 self.warn("could not find %s/ directory" % podir)
257 return
257 return
258
258
259 join = os.path.join
259 join = os.path.join
260 for po in os.listdir(podir):
260 for po in os.listdir(podir):
261 if not po.endswith('.po'):
261 if not po.endswith('.po'):
262 continue
262 continue
263 pofile = join(podir, po)
263 pofile = join(podir, po)
264 modir = join('locale', po[:-3], 'LC_MESSAGES')
264 modir = join('locale', po[:-3], 'LC_MESSAGES')
265 mofile = join(modir, 'hg.mo')
265 mofile = join(modir, 'hg.mo')
266 mobuildfile = join('mercurial', mofile)
266 mobuildfile = join('mercurial', mofile)
267 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
267 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
268 if sys.platform != 'sunos5':
268 if sys.platform != 'sunos5':
269 # msgfmt on Solaris does not know about -c
269 # msgfmt on Solaris does not know about -c
270 cmd.append('-c')
270 cmd.append('-c')
271 self.mkpath(join('mercurial', modir))
271 self.mkpath(join('mercurial', modir))
272 self.make_file([pofile], mobuildfile, spawn, (cmd,))
272 self.make_file([pofile], mobuildfile, spawn, (cmd,))
273
273
274
274
275 class hgdist(Distribution):
275 class hgdist(Distribution):
276 pure = False
276 pure = False
277 cffi = ispypy
277 cffi = ispypy
278
278
279 global_options = Distribution.global_options + \
279 global_options = Distribution.global_options + \
280 [('pure', None, "use pure (slow) Python "
280 [('pure', None, "use pure (slow) Python "
281 "code instead of C extensions"),
281 "code instead of C extensions"),
282 ]
282 ]
283
283
284 def has_ext_modules(self):
284 def has_ext_modules(self):
285 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
285 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
286 # too late for some cases
286 # too late for some cases
287 return not self.pure and Distribution.has_ext_modules(self)
287 return not self.pure and Distribution.has_ext_modules(self)
288
288
289 # This is ugly as a one-liner. So use a variable.
289 # This is ugly as a one-liner. So use a variable.
290 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
290 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
291 buildextnegops['no-zstd'] = 'zstd'
291 buildextnegops['no-zstd'] = 'zstd'
292
292
293 class hgbuildext(build_ext):
293 class hgbuildext(build_ext):
294 user_options = build_ext.user_options + [
294 user_options = build_ext.user_options + [
295 ('zstd', None, 'compile zstd bindings [default]'),
295 ('zstd', None, 'compile zstd bindings [default]'),
296 ('no-zstd', None, 'do not compile zstd bindings'),
296 ('no-zstd', None, 'do not compile zstd bindings'),
297 ]
297 ]
298
298
299 boolean_options = build_ext.boolean_options + ['zstd']
299 boolean_options = build_ext.boolean_options + ['zstd']
300 negative_opt = buildextnegops
300 negative_opt = buildextnegops
301
301
302 def initialize_options(self):
302 def initialize_options(self):
303 self.zstd = True
303 self.zstd = True
304 return build_ext.initialize_options(self)
304 return build_ext.initialize_options(self)
305
305
306 def build_extensions(self):
306 def build_extensions(self):
307 # Filter out zstd if disabled via argument.
307 # Filter out zstd if disabled via argument.
308 if not self.zstd:
308 if not self.zstd:
309 self.extensions = [e for e in self.extensions
309 self.extensions = [e for e in self.extensions
310 if e.name != 'mercurial.zstd']
310 if e.name != 'mercurial.zstd']
311
311
312 return build_ext.build_extensions(self)
312 return build_ext.build_extensions(self)
313
313
314 def build_extension(self, ext):
314 def build_extension(self, ext):
315 try:
315 try:
316 build_ext.build_extension(self, ext)
316 build_ext.build_extension(self, ext)
317 except CCompilerError:
317 except CCompilerError:
318 if not getattr(ext, 'optional', False):
318 if not getattr(ext, 'optional', False):
319 raise
319 raise
320 log.warn("Failed to build optional extension '%s' (skipping)",
320 log.warn("Failed to build optional extension '%s' (skipping)",
321 ext.name)
321 ext.name)
322
322
323 class hgbuildscripts(build_scripts):
323 class hgbuildscripts(build_scripts):
324 def run(self):
324 def run(self):
325 if os.name != 'nt' or self.distribution.pure:
325 if os.name != 'nt' or self.distribution.pure:
326 return build_scripts.run(self)
326 return build_scripts.run(self)
327
327
328 exebuilt = False
328 exebuilt = False
329 try:
329 try:
330 self.run_command('build_hgexe')
330 self.run_command('build_hgexe')
331 exebuilt = True
331 exebuilt = True
332 except (DistutilsError, CCompilerError):
332 except (DistutilsError, CCompilerError):
333 log.warn('failed to build optional hg.exe')
333 log.warn('failed to build optional hg.exe')
334
334
335 if exebuilt:
335 if exebuilt:
336 # Copying hg.exe to the scripts build directory ensures it is
336 # Copying hg.exe to the scripts build directory ensures it is
337 # installed by the install_scripts command.
337 # installed by the install_scripts command.
338 hgexecommand = self.get_finalized_command('build_hgexe')
338 hgexecommand = self.get_finalized_command('build_hgexe')
339 dest = os.path.join(self.build_dir, 'hg.exe')
339 dest = os.path.join(self.build_dir, 'hg.exe')
340 self.mkpath(self.build_dir)
340 self.mkpath(self.build_dir)
341 self.copy_file(hgexecommand.hgexepath, dest)
341 self.copy_file(hgexecommand.hgexepath, dest)
342
342
343 # Remove hg.bat because it is redundant with hg.exe.
343 # Remove hg.bat because it is redundant with hg.exe.
344 self.scripts.remove('contrib/win32/hg.bat')
344 self.scripts.remove('contrib/win32/hg.bat')
345
345
346 return build_scripts.run(self)
346 return build_scripts.run(self)
347
347
348 class hgbuildpy(build_py):
348 class hgbuildpy(build_py):
349 def finalize_options(self):
349 def finalize_options(self):
350 build_py.finalize_options(self)
350 build_py.finalize_options(self)
351
351
352 if self.distribution.pure:
352 if self.distribution.pure:
353 self.distribution.ext_modules = []
353 self.distribution.ext_modules = []
354 elif self.distribution.cffi:
354 elif self.distribution.cffi:
355 from mercurial.cffi import (
355 from mercurial.cffi import (
356 bdiff,
356 bdiff,
357 mpatch,
357 mpatch,
358 )
358 )
359 exts = [mpatch.ffi.distutils_extension(),
359 exts = [mpatch.ffi.distutils_extension(),
360 bdiff.ffi.distutils_extension()]
360 bdiff.ffi.distutils_extension()]
361 # cffi modules go here
361 # cffi modules go here
362 if sys.platform == 'darwin':
362 if sys.platform == 'darwin':
363 from mercurial.cffi import osutil
363 from mercurial.cffi import osutil
364 exts.append(osutil.ffi.distutils_extension())
364 exts.append(osutil.ffi.distutils_extension())
365 self.distribution.ext_modules = exts
365 self.distribution.ext_modules = exts
366 else:
366 else:
367 h = os.path.join(get_python_inc(), 'Python.h')
367 h = os.path.join(get_python_inc(), 'Python.h')
368 if not os.path.exists(h):
368 if not os.path.exists(h):
369 raise SystemExit('Python headers are required to build '
369 raise SystemExit('Python headers are required to build '
370 'Mercurial but weren\'t found in %s' % h)
370 'Mercurial but weren\'t found in %s' % h)
371
371
372 def run(self):
372 def run(self):
373 if self.distribution.pure:
373 if self.distribution.pure:
374 modulepolicy = 'py'
374 modulepolicy = 'py'
375 elif self.build_lib == '.':
375 elif self.build_lib == '.':
376 # in-place build should run without rebuilding C extensions
376 # in-place build should run without rebuilding C extensions
377 modulepolicy = 'allow'
377 modulepolicy = 'allow'
378 else:
378 else:
379 modulepolicy = 'c'
379 modulepolicy = 'c'
380 with open("mercurial/__modulepolicy__.py", "w") as f:
380 with open("mercurial/__modulepolicy__.py", "w") as f:
381 f.write('# this file is autogenerated by setup.py\n')
381 f.write('# this file is autogenerated by setup.py\n')
382 f.write('modulepolicy = b"%s"\n' % modulepolicy)
382 f.write('modulepolicy = b"%s"\n' % modulepolicy)
383
383
384 build_py.run(self)
384 build_py.run(self)
385
385
386 class buildhgextindex(Command):
386 class buildhgextindex(Command):
387 description = 'generate prebuilt index of hgext (for frozen package)'
387 description = 'generate prebuilt index of hgext (for frozen package)'
388 user_options = []
388 user_options = []
389 _indexfilename = 'hgext/__index__.py'
389 _indexfilename = 'hgext/__index__.py'
390
390
391 def initialize_options(self):
391 def initialize_options(self):
392 pass
392 pass
393
393
394 def finalize_options(self):
394 def finalize_options(self):
395 pass
395 pass
396
396
397 def run(self):
397 def run(self):
398 if os.path.exists(self._indexfilename):
398 if os.path.exists(self._indexfilename):
399 with open(self._indexfilename, 'w') as f:
399 with open(self._indexfilename, 'w') as f:
400 f.write('# empty\n')
400 f.write('# empty\n')
401
401
402 # here no extension enabled, disabled() lists up everything
402 # here no extension enabled, disabled() lists up everything
403 code = ('import pprint; from mercurial import extensions; '
403 code = ('import pprint; from mercurial import extensions; '
404 'pprint.pprint(extensions.disabled())')
404 'pprint.pprint(extensions.disabled())')
405 out, err = runcmd([sys.executable, '-c', code], env)
405 out, err = runcmd([sys.executable, '-c', code], env)
406 if err:
406 if err:
407 raise DistutilsExecError(err)
407 raise DistutilsExecError(err)
408
408
409 with open(self._indexfilename, 'w') as f:
409 with open(self._indexfilename, 'w') as f:
410 f.write('# this file is autogenerated by setup.py\n')
410 f.write('# this file is autogenerated by setup.py\n')
411 f.write('docs = ')
411 f.write('docs = ')
412 f.write(out)
412 f.write(out)
413
413
414 class buildhgexe(build_ext):
414 class buildhgexe(build_ext):
415 description = 'compile hg.exe from mercurial/exewrapper.c'
415 description = 'compile hg.exe from mercurial/exewrapper.c'
416
416
417 def build_extensions(self):
417 def build_extensions(self):
418 if os.name != 'nt':
418 if os.name != 'nt':
419 return
419 return
420 if isinstance(self.compiler, HackedMingw32CCompiler):
420 if isinstance(self.compiler, HackedMingw32CCompiler):
421 self.compiler.compiler_so = self.compiler.compiler # no -mdll
421 self.compiler.compiler_so = self.compiler.compiler # no -mdll
422 self.compiler.dll_libraries = [] # no -lmsrvc90
422 self.compiler.dll_libraries = [] # no -lmsrvc90
423
423
424 # Different Python installs can have different Python library
424 # Different Python installs can have different Python library
425 # names. e.g. the official CPython distribution uses pythonXY.dll
425 # names. e.g. the official CPython distribution uses pythonXY.dll
426 # and MinGW uses libpythonX.Y.dll.
426 # and MinGW uses libpythonX.Y.dll.
427 _kernel32 = ctypes.windll.kernel32
427 _kernel32 = ctypes.windll.kernel32
428 _kernel32.GetModuleFileNameA.argtypes = [ctypes.c_void_p,
428 _kernel32.GetModuleFileNameA.argtypes = [ctypes.c_void_p,
429 ctypes.c_void_p,
429 ctypes.c_void_p,
430 ctypes.c_ulong]
430 ctypes.c_ulong]
431 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
431 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
432 size = 1000
432 size = 1000
433 buf = ctypes.create_string_buffer(size + 1)
433 buf = ctypes.create_string_buffer(size + 1)
434 filelen = _kernel32.GetModuleFileNameA(sys.dllhandle, ctypes.byref(buf),
434 filelen = _kernel32.GetModuleFileNameA(sys.dllhandle, ctypes.byref(buf),
435 size)
435 size)
436
436
437 if filelen > 0 and filelen != size:
437 if filelen > 0 and filelen != size:
438 dllbasename = os.path.basename(buf.value)
438 dllbasename = os.path.basename(buf.value)
439 if not dllbasename.lower().endswith('.dll'):
439 if not dllbasename.lower().endswith('.dll'):
440 raise SystemExit('Python DLL does not end with .dll: %s' %
440 raise SystemExit('Python DLL does not end with .dll: %s' %
441 dllbasename)
441 dllbasename)
442 pythonlib = dllbasename[:-4]
442 pythonlib = dllbasename[:-4]
443 else:
443 else:
444 log.warn('could not determine Python DLL filename; '
444 log.warn('could not determine Python DLL filename; '
445 'assuming pythonXY')
445 'assuming pythonXY')
446
446
447 hv = sys.hexversion
447 hv = sys.hexversion
448 pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
448 pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
449
449
450 log.info('using %s as Python library name' % pythonlib)
450 log.info('using %s as Python library name' % pythonlib)
451 with open('mercurial/hgpythonlib.h', 'wb') as f:
451 with open('mercurial/hgpythonlib.h', 'wb') as f:
452 f.write('/* this file is autogenerated by setup.py */\n')
452 f.write('/* this file is autogenerated by setup.py */\n')
453 f.write('#define HGPYTHONLIB "%s"\n' % pythonlib)
453 f.write('#define HGPYTHONLIB "%s"\n' % pythonlib)
454 objects = self.compiler.compile(['mercurial/exewrapper.c'],
454 objects = self.compiler.compile(['mercurial/exewrapper.c'],
455 output_dir=self.build_temp)
455 output_dir=self.build_temp)
456 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
456 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
457 target = os.path.join(dir, 'hg')
457 target = os.path.join(dir, 'hg')
458 self.compiler.link_executable(objects, target,
458 self.compiler.link_executable(objects, target,
459 libraries=[],
459 libraries=[],
460 output_dir=self.build_temp)
460 output_dir=self.build_temp)
461
461
462 @property
462 @property
463 def hgexepath(self):
463 def hgexepath(self):
464 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
464 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
465 return os.path.join(self.build_temp, dir, 'hg.exe')
465 return os.path.join(self.build_temp, dir, 'hg.exe')
466
466
class hginstalllib(install_lib):
    '''
    This is a specialization of install_lib that replaces the copy_file used
    there so that it supports setting the mode of files after copying them,
    instead of just preserving the mode that the files originally had. If your
    system has a umask of something like 027, preserving the permissions when
    copying will lead to a broken install.

    Note that just passing keep_permissions=False to copy_file would be
    insufficient, as it might still be applying a umask.
    '''

    def run(self):
        # Temporarily monkeypatch distutils' copy_file with a wrapper that
        # normalizes the destination's mode after each successful copy.
        origcopyfile = file_util.copy_file

        def copywithmode(*args, **kwargs):
            source = args[0]
            destination, copied = origcopyfile(*args, **kwargs)
            if not copied:
                return
            srcstat = os.stat(source)
            # Persist executable bit (apply it to group and other if user
            # has it)
            executable = srcstat[stat.ST_MODE] & stat.S_IXUSR
            wanted = int('0755', 8) if executable else int('0644', 8)
            mode = stat.S_IMODE(srcstat[stat.ST_MODE])
            mode = (mode & ~int('0777', 8)) | wanted
            os.chmod(destination, mode)

        file_util.copy_file = copywithmode
        try:
            install_lib.run(self)
        finally:
            # Always restore the real copy_file, even if the install fails.
            file_util.copy_file = origcopyfile
500
500
class hginstallscripts(install_scripts):
    '''
    This is a specialization of install_scripts that replaces the @LIBDIR@ with
    the configured directory for modules. If possible, the path is made relative
    to the directory for scripts.
    '''

    def initialize_options(self):
        install_scripts.initialize_options(self)

        # Filled in from the 'install' command in finalize_options().
        self.install_lib = None

    def finalize_options(self):
        install_scripts.finalize_options(self)
        self.set_undefined_options('install',
                                   ('install_lib', 'install_lib'))

    def run(self):
        install_scripts.run(self)

        # It only makes sense to replace @LIBDIR@ with the install path if
        # the install path is known. For wheels, the logic below calculates
        # the libdir to be "../..". This is because the internal layout of a
        # wheel archive looks like:
        #
        #   mercurial-3.6.1.data/scripts/hg
        #   mercurial/__init__.py
        #
        # When installing wheels, the subdirectories of the "<pkg>.data"
        # directory are translated to system local paths and files therein
        # are copied in place. The mercurial/* files are installed into the
        # site-packages directory. However, the site-packages directory
        # isn't known until wheel install time. This means we have no clue
        # at wheel generation time what the installed site-packages directory
        # will be. And, wheels don't appear to provide the ability to register
        # custom code to run during wheel installation. This all means that
        # we can't reliably set the libdir in wheels: the default behavior
        # of looking in sys.path must do.

        if (os.path.splitdrive(self.install_dir)[0] !=
            os.path.splitdrive(self.install_lib)[0]):
            # can't make relative paths from one drive to another, so use an
            # absolute path instead
            libdir = self.install_lib
        else:
            # NOTE(review): commonprefix() is character based, not component
            # based, so 'common' can end mid-component; the relative path is
            # only exact for the usual <prefix>/bin vs <prefix>/lib layouts.
            common = os.path.commonprefix((self.install_dir, self.install_lib))
            rest = self.install_dir[len(common):]
            # Count how many directory levels the scripts directory sits
            # below the common prefix. The previous code used
            # os.path.split(rest), which returns only a (head, tail) pair
            # and therefore capped the count at 2, undercounting for
            # scripts directories nested three or more levels deep. Split
            # on os.sep so every component is counted.
            uplevel = len([n for n in rest.split(os.sep) if n])

            libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):]

        for outfile in self.outfiles:
            with open(outfile, 'rb') as fp:
                data = fp.read()

            # skip binary files
            if b'\0' in data:
                continue

            # During local installs, the shebang will be rewritten to the final
            # install path. During wheel packaging, the shebang has a special
            # value.
            if data.startswith(b'#!python'):
                log.info('not rewriting @LIBDIR@ in %s because install path '
                         'not known' % outfile)
                continue

            data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape))
            with open(outfile, 'wb') as fp:
                fp.write(data)
571
571
# Map distutils command names to the Mercurial-specific subclasses defined
# above, so 'setup.py build', 'install', etc. pick up the custom behavior.
cmdclass = {'build': hgbuild,
            'build_mo': hgbuildmo,
            'build_ext': hgbuildext,
            'build_py': hgbuildpy,
            'build_scripts': hgbuildscripts,
            'build_hgextindex': buildhgextindex,
            'install_lib': hginstalllib,
            'install_scripts': hginstallscripts,
            'build_hgexe': buildhgexe,
            }

# Pure-Python (sub)packages shipped with Mercurial, including the bundled
# extensions under hgext and the hgext3rd namespace.
packages = ['mercurial',
            'mercurial.cext',
            'mercurial.hgweb',
            'mercurial.httpclient',
            'mercurial.pure',
            'hgext', 'hgext.convert', 'hgext.fsmonitor',
            'hgext.fsmonitor.pywatchman', 'hgext.highlight',
            'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd']

# Headers shared by the C extensions; listed as 'depends' so changing any
# of them triggers a rebuild of the extensions.
common_depends = ['mercurial/bitmanipulation.h',
                  'mercurial/compat.h',
                  'mercurial/util.h']
# All C extensions look up their headers under mercurial/.
common_include_dirs = ['mercurial']
596
596
# Compiler/linker flags for the osutil C extension, populated by the
# platform feature probes below.
osutil_cflags = []
osutil_ldflags = []

# platform specific macros
for plat, func in [('bsd', 'setproctitle')]:
    # Define HAVE_SETPROCTITLE when the platform's libc provides it so the
    # C code can call it.
    if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
        osutil_cflags.append('-DHAVE_%s' % func.upper())

# Compile small probe programs to detect which statfs variant the platform
# offers, and define the matching HAVE_* macro on success.
for plat, macro, code in [
    ('bsd|darwin', 'BSD_STATFS', '''
     #include <sys/param.h>
     #include <sys/mount.h>
     int main() { struct statfs s; return sizeof(s.f_fstypename); }
     '''),
    ('linux', 'LINUX_STATFS', '''
     #include <linux/magic.h>
     #include <sys/vfs.h>
     int main() { struct statfs s; return sizeof(s.f_type); }
     '''),
]:
    if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
        osutil_cflags.append('-DHAVE_%s' % macro)

if sys.platform == 'darwin':
    # NOTE(review): osutil links against ApplicationServices on macOS,
    # presumably for APIs used in the C source -- confirm against osutil.c.
    osutil_ldflags += ['-framework', 'ApplicationServices']
622
622
# C extension modules. Note that osutil builds from mercurial/cext/ into
# the mercurial.cext package, while the remaining modules still build from
# the top-level mercurial/ directory.
extmodules = [
    Extension('mercurial.base85', ['mercurial/base85.c'],
              include_dirs=common_include_dirs,
              depends=common_depends),
    Extension('mercurial.bdiff', ['mercurial/bdiff.c',
                                  'mercurial/bdiff_module.c'],
              include_dirs=common_include_dirs,
              depends=common_depends + ['mercurial/bdiff.h']),
    Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'],
              include_dirs=common_include_dirs,
              depends=common_depends),
    Extension('mercurial.mpatch', ['mercurial/mpatch.c',
                                   'mercurial/mpatch_module.c'],
              include_dirs=common_include_dirs,
              depends=common_depends),
    Extension('mercurial.parsers', ['mercurial/dirs.c',
                                    'mercurial/manifest.c',
                                    'mercurial/parsers.c',
                                    'mercurial/pathencode.c'],
              include_dirs=common_include_dirs,
              depends=common_depends),
    # osutil gets the platform-specific flags detected above.
    Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
              include_dirs=common_include_dirs,
              extra_compile_args=osutil_cflags,
              extra_link_args=osutil_ldflags,
              depends=common_depends),
    Extension('hgext.fsmonitor.pywatchman.bser',
              ['hgext/fsmonitor/pywatchman/bser.c']),
]
652
652
# python-zstandard is vendored under contrib/; put it on sys.path so its
# setup helper can be imported, then add the resulting C extension.
sys.path.insert(0, 'contrib/python-zstandard')
import setup_zstd
extmodules.append(setup_zstd.get_c_extension(name='mercurial.zstd'))
656
656
# Work around distutils passing the long-deprecated -mno-cygwin flag to
# MinGW: subclass the compiler class and strip the flag from every tool
# command line it was baked into.
try:
    from distutils import cygwinccompiler

    # the -mno-cygwin option has been deprecated for years
    compiler = cygwinccompiler.Mingw32CCompiler

    class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
        def __init__(self, *args, **kwargs):
            compiler.__init__(self, *args, **kwargs)
            for i in 'compiler compiler_so linker_exe linker_so'.split():
                try:
                    getattr(self, i).remove('-mno-cygwin')
                except ValueError:
                    # flag not present for this tool; nothing to strip
                    pass

    cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
except ImportError:
    # the cygwinccompiler package is not available on some Python
    # distributions like the ones from the optware project for Synology
    # DiskStation boxes
    class HackedMingw32CCompiler(object):
        pass
679
679
# Non-Python files installed inside the mercurial package: translations,
# help text, default config snippets, and the dummy TLS certificate.
packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
                             'help/*.txt',
                             'help/internals/*.txt',
                             'default.d/*.rc',
                             'dummycert.pem']}
685
685
def ordinarypath(p):
    """Return a true value for names worth packaging: non-empty, not
    hidden (no leading '.') and not editor backups (no trailing '~')."""
    if not p:
        # preserve the original short-circuit: empty input falls through
        return p
    return not (p.startswith('.') or p.endswith('~'))
688
688
# Recursively register template files as package data, skipping hidden
# files/directories and editor backups via ordinarypath().
for root in ('templates',):
    for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
        # strip the leading 'mercurial' + separator so the recorded paths
        # are relative to the package directory
        curdir = curdir.split(os.sep, 1)[1]
        # prune unwanted directories in place so os.walk never descends
        # into them
        dirs[:] = filter(ordinarypath, dirs)
        for f in filter(ordinarypath, files):
            f = os.path.join(curdir, f)
            packagedata['mercurial'].append(f)

# No extra data files outside the packages by default.
datafiles = []
698
698
# distutils expects version to be str/unicode. Converting it to
# unicode on Python 2 still works because it won't contain any
# non-ascii bytes and will be implicitly converted back to bytes
# when operated on.
assert isinstance(version, bytes)
setupversion = version.decode('ascii')

# Extra keyword arguments for setup(); only populated for py2exe builds.
extra = {}

if py2exeloaded:
    extra['console'] = [
        {'script':'hg',
         'copyright':'Copyright (C) 2005-2017 Matt Mackall and others',
         'product_version':version}]
    # sub command of 'build' because 'py2exe' does not handle sub_commands
    build.sub_commands.insert(0, ('build_hgextindex', None))
    # put dlls in sub directory so that they won't pollute PATH
    extra['zipfile'] = 'lib/library.zip'

if os.name == 'nt':
    # Windows binary file versions for exe/dll files must have the
    # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
    # NOTE(review): 'version' is bytes here, so split('+') with a str
    # separator would fail on Python 3 -- confirm this path is py2-only.
    setupversion = version.split('+', 1)[0]
722
722
# macOS/XCode build workarounds: detect the installed XCode version and
# adjust compiler environment variables accordingly.
if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
    # NOTE(review): this rebinds the module-level 'version' (previously the
    # Mercurial version bytes) to xcodebuild's output; 'setupversion' was
    # already derived above, so this looks intentional but is confusing.
    version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[0].splitlines()
    if version:
        version = version[0]
        if sys.version_info[0] == 3:
            # runcmd returns bytes; decode for the string comparisons below
            version = version.decode('utf-8')
        xcode4 = (version.startswith('Xcode') and
                  StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
        xcode51 = re.match(r'^Xcode\s+5\.1', version) is not None
    else:
        # xcodebuild returns empty on OS X Lion with XCode 4.3 not
        # installed, but instead with only command-line tools. Assume
        # that only happens on >= Lion, thus no PPC support.
        xcode4 = True
        xcode51 = False

    # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
    # distutils.sysconfig
    if xcode4:
        os.environ['ARCHFLAGS'] = ''

    # XCode 5.1 changes clang such that it now fails to compile if the
    # -mno-fused-madd flag is passed, but the version of Python shipped with
    # OS X 10.9 Mavericks includes this flag. This causes problems in all
    # C extension modules, and a bug has been filed upstream at
    # http://bugs.python.org/issue21244. We also need to patch this here
    # so Mercurial can continue to compile in the meantime.
    if xcode51:
        cflags = get_config_var('CFLAGS')
        if cflags and re.search(r'-mno-fused-madd\b', cflags) is not None:
            os.environ['CFLAGS'] = (
                os.environ.get('CFLAGS', '') + ' -Qunused-arguments')
755
755
# Hand everything assembled above to distutils: the custom command classes,
# package/extension lists, data files, and the py2exe-only 'extra' kwargs.
setup(name='mercurial',
      version=setupversion,
      author='Matt Mackall and many others',
      author_email='mercurial@mercurial-scm.org',
      url='https://mercurial-scm.org/',
      download_url='https://mercurial-scm.org/release/',
      description=('Fast scalable distributed SCM (revision control, version '
                   'control) system'),
      long_description=('Mercurial is a distributed SCM tool written in Python.'
                        ' It is used by a number of large projects that require'
                        ' fast, reliable distributed revision control, such as '
                        'Mozilla.'),
      license='GNU GPLv2 or any later version',
      classifiers=[
          'Development Status :: 6 - Mature',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'Intended Audience :: System Administrators',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Natural Language :: Danish',
          'Natural Language :: English',
          'Natural Language :: German',
          'Natural Language :: Italian',
          'Natural Language :: Japanese',
          'Natural Language :: Portuguese (Brazilian)',
          'Operating System :: Microsoft :: Windows',
          'Operating System :: OS Independent',
          'Operating System :: POSIX',
          'Programming Language :: C',
          'Programming Language :: Python',
          'Topic :: Software Development :: Version Control',
      ],
      scripts=scripts,
      packages=packages,
      ext_modules=extmodules,
      data_files=datafiles,
      package_data=packagedata,
      cmdclass=cmdclass,
      distclass=hgdist,
      options={'py2exe': {'packages': ['hgext', 'email']},
               'bdist_mpkg': {'zipdist': False,
                              'license': 'COPYING',
                              'readme': 'contrib/macosx/Readme.html',
                              'welcome': 'contrib/macosx/Welcome.html',
                              },
               },
      **extra)
General Comments 0
You need to be logged in to leave comments. Login now