##// END OF EJS Templates
extras: re-use Projection from jaraco.collections
Jason R. Coombs -
r51261:82e5a9b1 default
parent child Browse files
Show More
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -0,0 +1,56 b''
1 # adapted from jaraco.collections 3.9
2
3 import collections
4
5
class Projection(collections.abc.Mapping):
    """
    Project a set of keys over a mapping

    >>> sample = {'a': 1, 'b': 2, 'c': 3}
    >>> prj = Projection(['a', 'c', 'd'], sample)
    >>> prj == {'a': 1, 'c': 3}
    True

    Keys should only appear if they were specified and exist in the space.

    >>> sorted(list(prj.keys()))
    ['a', 'c']

    Attempting to access a key not in the projection
    results in a KeyError.

    >>> prj['b']
    Traceback (most recent call last):
    ...
    KeyError: 'b'

    Use the projection to update another dict.

    >>> target = {'a': 2, 'b': 2}
    >>> target.update(prj)
    >>> target == {'a': 1, 'b': 2, 'c': 3}
    True

    Also note that Projection keeps a reference to the original dict, so
    if you modify the original dict, that could modify the Projection.

    >>> del sample['a']
    >>> dict(prj)
    {'c': 3}
    """

    def __init__(self, keys, space):
        # Remember the requested keys and keep a live reference to the
        # backing mapping, so later mutations of ``space`` show through.
        self._keys = tuple(keys)
        self._space = space

    def __getitem__(self, key):
        # Only keys that were requested at construction time may be read.
        if key in self._keys:
            return self._space[key]
        raise KeyError(key)

    def __iter__(self):
        # Requested keys that actually exist in the space, in no
        # particular order (set intersection).
        return iter(set(self._keys) & set(self._space))

    def __len__(self):
        return sum(1 for _ in self)
@@ -1,772 +1,773 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2
2
3
3
4 import ast
4 import ast
5 import collections
5 import collections
6 import io
6 import io
7 import os
7 import os
8 import sys
8 import sys
9
9
10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
11 # to work when run from a virtualenv. The modules were chosen empirically
11 # to work when run from a virtualenv. The modules were chosen empirically
12 # so that the return value matches the return value without virtualenv.
12 # so that the return value matches the return value without virtualenv.
13 if True: # disable lexical sorting checks
13 if True: # disable lexical sorting checks
14 try:
14 try:
15 import BaseHTTPServer as basehttpserver
15 import BaseHTTPServer as basehttpserver
16 except ImportError:
16 except ImportError:
17 basehttpserver = None
17 basehttpserver = None
18 import zlib
18 import zlib
19
19
20 import testparseutil
20 import testparseutil
21
21
22 # Allow list of modules that symbols can be directly imported from.
22 # Allow list of modules that symbols can be directly imported from.
# Allow list of modules that symbols can be directly imported from.
allowsymbolimports = (
    '__future__',
    'breezy',
    'concurrent',
    'hgclient',
    'mercurial',
    'mercurial.hgweb.common',
    'mercurial.hgweb.request',
    'mercurial.i18n',
    'mercurial.interfaces',
    'mercurial.node',
    'mercurial.pycompat',
    # for revlog to re-export constant to extensions
    'mercurial.revlogutils.constants',
    'mercurial.revlogutils.flagutil',
    # for cffi modules to re-export pure functions
    'mercurial.pure.base85',
    'mercurial.pure.bdiff',
    'mercurial.pure.mpatch',
    'mercurial.pure.osutil',
    'mercurial.pure.parsers',
    # third-party imports should be directly imported
    'mercurial.thirdparty',
    'mercurial.thirdparty.attr',
    'mercurial.thirdparty.jaraco.collections',
    'mercurial.thirdparty.zope',
    'mercurial.thirdparty.zope.interface',
    'typing',
    'xml.etree.ElementTree',
)

# Allow list of symbols that can be directly imported.
directsymbols = ('demandimport',)

# Modules that must be aliased because they are commonly confused with
# common variables and can create aliasing and readability issues.
requirealias = {
    'ui': 'uimod',
}
61
62
62
63
def walklocal(root):
    """Recursively yield all descendant nodes but not in a different scope"""
    pending = collections.deque(ast.iter_child_nodes(root))
    yield root, False
    while pending:
        current = pending.popleft()
        enters_scope = isinstance(current, ast.FunctionDef)
        # Do not descend into a nested function: that is a new scope.
        if not enters_scope:
            pending.extend(ast.iter_child_nodes(current))
        yield current, enters_scope
73
74
74
75
def dotted_name_of_path(path):
    """Given a relative path to a source file, return its dotted module name.

    >>> dotted_name_of_path('mercurial/error.py')
    'mercurial.error'
    >>> dotted_name_of_path('zlibmodule.so')
    'zlib'
    """
    components = path.replace(os.sep, '/').split('/')
    # Strip everything from the first dot (.py, .so and .ARCH.so).
    basename = components[-1].partition('.')[0]
    # C extension sources are conventionally named <name>module.
    if basename.endswith('module'):
        basename = basename[: -len('module')]
    components[-1] = basename
    return '.'.join(components)
88
89
89
90
def fromlocalfunc(modulename, localmods):
    """Get a function to examine which locally defined module the
    target source imports via a specified name.

    `modulename` is a `dotted_name_of_path()`-ed source file path of the
    target source (it may end in `.__init__`), and `localmods` is a set
    of absolute `dotted_name_of_path()`-ed paths of locally defined
    (= Mercurial specific) modules.  Any name not found in `localmods`
    is assumed to come from the Python standard library.

    The returned function takes a `name` (assumed not to end in
    `.__init__`) and an optional relative-import `level`, and returns
    either False (not a local module) or a tuple
    `(absname, dottedpath, hassubmod)` where:

    - `absname` is the absolute module name (e.g. "hgext.convert"),
      usable as a prefix for submodules
    - `dottedpath` is the `dotted_name_of_path()`-ed source path
      (e.g. "hgext.convert.__init__"), usable for `localmods` lookups
    - `hassubmod` is whether it may have submodules (equivalent to
      "absname != dottedpath")

    >>> localmods = {'foo.__init__', 'foo.foo1',
    ...              'foo.bar.__init__', 'foo.bar.bar1',
    ...              'baz.__init__', 'baz.baz1'}
    >>> fromlocal = fromlocalfunc('foo.xxx', localmods)
    >>> # relative
    >>> fromlocal('foo1')
    ('foo.foo1', 'foo.foo1', False)
    >>> fromlocal('bar')
    ('foo.bar', 'foo.bar.__init__', True)
    >>> fromlocal('bar.bar1')
    ('foo.bar.bar1', 'foo.bar.bar1', False)
    >>> # absolute
    >>> fromlocal('baz')
    ('baz', 'baz.__init__', True)
    >>> fromlocal('baz.baz1')
    ('baz.baz1', 'baz.baz1', False)
    >>> # unknown = maybe standard library
    >>> fromlocal('os')
    False
    >>> fromlocal(None, 1)
    ('foo', 'foo.__init__', True)
    >>> fromlocal('foo1', 1)
    ('foo.foo1', 'foo.foo1', False)
    >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
    >>> fromlocal2(None, 2)
    ('foo', 'foo.__init__', True)
    >>> fromlocal2('bar2', 1)
    False
    >>> fromlocal2('bar', 2)
    ('foo.bar', 'foo.bar.__init__', True)
    """
    if not isinstance(modulename, str):
        modulename = modulename.decode('ascii')
    parent = '.'.join(modulename.split('.')[:-1])
    prefix = parent + '.' if parent else ''

    def fromlocal(name, level=0):
        # name is a false value when relative imports ("from . import x")
        # are used.
        if not name:
            # A bare relative import; level must then be positive, and
            # tells how many package levels to strip from modulename.
            assert level > 0
            candidates = ['.'.join(modulename.split('.')[:-level])]
        elif level:
            # "from .name import x" style: resolve against the package
            # obtained by stripping `level` components.
            candidates = [
                '.'.join(modulename.split('.')[:-level]) + '.' + name
            ]
        else:
            # Plain name: try as a sibling module first, then absolute.
            candidates = [prefix + name, name]

        for candidate in candidates:
            if candidate in localmods:
                return (candidate, candidate, False)
            as_package = candidate + '.__init__'
            if as_package in localmods:
                return (candidate, as_package, True)
        return False

    return fromlocal
183
184
184
185
def populateextmods(localmods):
    """Populate C extension modules based on pure modules"""
    pure_prefix = 'mercurial.pure.'
    augmented = set(localmods)
    for modname in localmods:
        # Every pure module has cext and cffi counterparts.
        if modname.startswith(pure_prefix):
            suffix = modname[len(pure_prefix):]
            augmented.add('mercurial.cext.' + suffix)
            augmented.add('mercurial.cffi._' + suffix)
    return augmented
194
195
195
196
def list_stdlib_modules():
    """List the modules present in the stdlib.

    >>> py3 = sys.version_info[0] >= 3
    >>> mods = set(list_stdlib_modules())
    >>> 'BaseHTTPServer' in mods or py3
    True

    os.path isn't really a module, so it's missing:

    >>> 'os.path' in mods
    False

    sys requires special treatment, because it's baked into the
    interpreter, but it should still appear:

    >>> 'sys' in mods
    True

    >>> 'collections' in mods
    True

    >>> 'cStringIO' in mods or py3
    True

    >>> 'cffi' in mods
    True
    """
    # Modules compiled into the interpreter itself.
    yield from sys.builtin_module_names
    # These modules only exist on windows, but we should always
    # consider them stdlib.
    yield from ('msvcrt', '_winreg')
    yield '__builtin__'
    yield 'builtins'  # python3 only
    yield 'importlib.abc'  # python3 only
    yield 'importlib.machinery'  # python3 only
    yield 'importlib.util'  # python3 only
    yield 'packaging.version'
    yield from ('fcntl', 'grp', 'pwd', 'termios')  # Unix only
    yield from ('cPickle', 'datetime')  # in Python (not C) on PyPy
    yield 'cffi'
    stdlib_prefixes = {sys.prefix, sys.exec_prefix}
    # We need to supplement the list of prefixes for the search to work
    # when run from within a virtualenv.
    for mod in (basehttpserver, zlib):
        if mod is None:
            continue
        try:
            # Not all module objects have a __file__ attribute.
            filename = mod.__file__
        except AttributeError:
            continue
        location = os.path.dirname(filename)
        if not any(location.startswith(p) for p in stdlib_prefixes):
            # Not covered yet; record it as an additional prefix.
            stdlib_prefixes.add(location)
    sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    for libpath in sys.path:
        # We want to walk everything in sys.path that starts with something
        # in stdlib_prefixes, but not directories from the hg sources.
        if os.path.abspath(libpath).startswith(sourceroot):
            continue
        if not any(libpath.startswith(p) for p in stdlib_prefixes):
            continue
        for top, dirs, files in os.walk(libpath):
            if 'dist-packages' in top.split(os.path.sep):
                continue
            # Prune non-package directories and hg's own top-level packages
            # (iterate in reverse so deletion keeps indices valid).
            for i, d in reversed(list(enumerate(dirs))):
                is_package = os.path.exists(
                    os.path.join(top, d, '__init__.py')
                )
                is_hg = top == libpath and d in (
                    'hgdemandimport',
                    'hgext',
                    'mercurial',
                )
                if not is_package or is_hg:
                    del dirs[i]
            for name in files:
                if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
                    continue
                if name.startswith('__init__.py'):
                    # The package itself, named after its directory.
                    full_path = top
                else:
                    full_path = os.path.join(top, name)
                rel_path = full_path[len(libpath) + 1 :]
                yield dotted_name_of_path(rel_path)
288
289
289
290
# Computed once at import time; consulted below when classifying imports
# as stdlib vs. local.
stdlib_modules = set(list_stdlib_modules())
291
292
292
293
def imported_modules(source, modulename, f, localmods, ignore_nested=False):
    """Given the source of a file as a string, yield the names
    imported by that file.

    Args:
      source: The python source to examine as a string.
      modulename: of specified python source (may have `__init__`)
      localmods: set of locally defined module names (may have `__init__`)
      ignore_nested: If true, import statements that do not start in
                     column zero will be ignored.

    Returns:
      A list of absolute module names imported by the given source.

    >>> f = 'foo/xxx.py'
    >>> modulename = 'foo.xxx'
    >>> localmods = {'foo.__init__': True,
    ...              'foo.foo1': True, 'foo.foo2': True,
    ...              'foo.bar.__init__': True, 'foo.bar.bar1': True,
    ...              'baz.__init__': True, 'baz.baz1': True }
    >>> # standard library (= not locally defined ones)
    >>> sorted(imported_modules(
    ...        'from stdlib1 import foo, bar; import stdlib2',
    ...        modulename, f, localmods))
    []
    >>> # relative importing
    >>> sorted(imported_modules(
    ...        'import foo1; from bar import bar1',
    ...        modulename, f, localmods))
    ['foo.bar.bar1', 'foo.foo1']
    >>> sorted(imported_modules(
    ...        'from bar.bar1 import name1, name2, name3',
    ...        modulename, f, localmods))
    ['foo.bar.bar1']
    >>> # absolute importing
    >>> sorted(imported_modules(
    ...        'from baz import baz1, name1',
    ...        modulename, f, localmods))
    ['baz.__init__', 'baz.baz1']
    >>> # mixed importing, even though it shouldn't be recommended
    >>> sorted(imported_modules(
    ...        'import stdlib, foo1, baz',
    ...        modulename, f, localmods))
    ['baz.__init__', 'foo.foo1']
    >>> # ignore_nested
    >>> sorted(imported_modules(
    ...        '''import foo
    ... def wat():
    ...     import bar
    ... ''', modulename, f, localmods))
    ['foo.__init__', 'foo.bar.__init__']
    >>> sorted(imported_modules(
    ...        '''import foo
    ... def wat():
    ...     import bar
    ... ''', modulename, f, localmods, ignore_nested=True))
    ['foo.__init__']
    """
    fromlocal = fromlocalfunc(modulename, localmods)
    for node in ast.walk(ast.parse(source, f)):
        if ignore_nested and getattr(node, 'col_offset', 0) > 0:
            # Import statement inside some block (function, class, ...).
            continue
        if isinstance(node, ast.Import):
            for alias in node.names:
                resolved = fromlocal(alias.name)
                if resolved:
                    yield resolved[1]
                # otherwise this should import standard library
        elif isinstance(node, ast.ImportFrom):
            resolved = fromlocal(node.module, node.level)
            if not resolved:
                # this should import standard library
                continue

            absname, dottedpath, hassubmod = resolved
            if not hassubmod:
                # "dottedpath" is not a package; must be imported.
                # Examination of "node.names" would be redundant here,
                # e.g.: from mercurial.node import nullid, nullrev
                yield dottedpath
                continue

            # "dottedpath" is a package: each imported name may itself
            # be a submodule.
            sawnonmodule = False
            pkgprefix = absname + '.'
            for alias in node.names:
                submod = fromlocal(pkgprefix + alias.name)
                if submod:
                    yield submod[1]
                else:
                    # a function or a property of "node.module"
                    sawnonmodule = True
            if sawnonmodule and dottedpath != modulename:
                # "dottedpath" is a package, but imported because of
                # non-module lookup; specifically allow
                # "from . import foo" from __init__.py
                yield dottedpath
def verify_import_convention(module, source, localmods):
    """Verify imports match our established coding convention."""
    tree = ast.parse(source)
    return verify_modern_convention(module, tree, localmods)
399 def verify_modern_convention(module, root, localmods, root_col_offset=0):
400 def verify_modern_convention(module, root, localmods, root_col_offset=0):
400 """Verify a file conforms to the modern import convention rules.
401 """Verify a file conforms to the modern import convention rules.
401
402
402 The rules of the modern convention are:
403 The rules of the modern convention are:
403
404
404 * Ordering is stdlib followed by local imports. Each group is lexically
405 * Ordering is stdlib followed by local imports. Each group is lexically
405 sorted.
406 sorted.
406 * Importing multiple modules via "import X, Y" is not allowed: use
407 * Importing multiple modules via "import X, Y" is not allowed: use
407 separate import statements.
408 separate import statements.
408 * Importing multiple modules via "from X import ..." is allowed if using
409 * Importing multiple modules via "from X import ..." is allowed if using
409 parenthesis and one entry per line.
410 parenthesis and one entry per line.
410 * Only 1 relative import statement per import level ("from .", "from ..")
411 * Only 1 relative import statement per import level ("from .", "from ..")
411 is allowed.
412 is allowed.
412 * Relative imports from higher levels must occur before lower levels. e.g.
413 * Relative imports from higher levels must occur before lower levels. e.g.
413 "from .." must be before "from .".
414 "from .." must be before "from .".
414 * Imports from peer packages should use relative import (e.g. do not
415 * Imports from peer packages should use relative import (e.g. do not
415 "import mercurial.foo" from a "mercurial.*" module).
416 "import mercurial.foo" from a "mercurial.*" module).
416 * Symbols can only be imported from specific modules (see
417 * Symbols can only be imported from specific modules (see
417 `allowsymbolimports`). For other modules, first import the module then
418 `allowsymbolimports`). For other modules, first import the module then
418 assign the symbol to a module-level variable. In addition, these imports
419 assign the symbol to a module-level variable. In addition, these imports
419 must be performed before other local imports. This rule only
420 must be performed before other local imports. This rule only
420 applies to import statements outside of any blocks.
421 applies to import statements outside of any blocks.
421 * Relative imports from the standard library are not allowed, unless that
422 * Relative imports from the standard library are not allowed, unless that
422 library is also a local module.
423 library is also a local module.
423 * Certain modules must be aliased to alternate names to avoid aliasing
424 * Certain modules must be aliased to alternate names to avoid aliasing
424 and readability problems. See `requirealias`.
425 and readability problems. See `requirealias`.
425 """
426 """
426 if not isinstance(module, str):
427 if not isinstance(module, str):
427 module = module.decode('ascii')
428 module = module.decode('ascii')
428 topmodule = module.split('.')[0]
429 topmodule = module.split('.')[0]
429 fromlocal = fromlocalfunc(module, localmods)
430 fromlocal = fromlocalfunc(module, localmods)
430
431
431 # Whether a local/non-stdlib import has been performed.
432 # Whether a local/non-stdlib import has been performed.
432 seenlocal = None
433 seenlocal = None
433 # Whether a local/non-stdlib, non-symbol import has been seen.
434 # Whether a local/non-stdlib, non-symbol import has been seen.
434 seennonsymbollocal = False
435 seennonsymbollocal = False
435 # The last name to be imported (for sorting).
436 # The last name to be imported (for sorting).
436 lastname = None
437 lastname = None
437 laststdlib = None
438 laststdlib = None
438 # Relative import levels encountered so far.
439 # Relative import levels encountered so far.
439 seenlevels = set()
440 seenlevels = set()
440
441
441 for node, newscope in walklocal(root):
442 for node, newscope in walklocal(root):
442
443
443 def msg(fmt, *args):
444 def msg(fmt, *args):
444 return (fmt % args, node.lineno)
445 return (fmt % args, node.lineno)
445
446
446 if newscope:
447 if newscope:
447 # Check for local imports in function
448 # Check for local imports in function
448 for r in verify_modern_convention(
449 for r in verify_modern_convention(
449 module, node, localmods, node.col_offset + 4
450 module, node, localmods, node.col_offset + 4
450 ):
451 ):
451 yield r
452 yield r
452 elif isinstance(node, ast.Import):
453 elif isinstance(node, ast.Import):
453 # Disallow "import foo, bar" and require separate imports
454 # Disallow "import foo, bar" and require separate imports
454 # for each module.
455 # for each module.
455 if len(node.names) > 1:
456 if len(node.names) > 1:
456 yield msg(
457 yield msg(
457 'multiple imported names: %s',
458 'multiple imported names: %s',
458 ', '.join(n.name for n in node.names),
459 ', '.join(n.name for n in node.names),
459 )
460 )
460
461
461 name = node.names[0].name
462 name = node.names[0].name
462 asname = node.names[0].asname
463 asname = node.names[0].asname
463
464
464 stdlib = name in stdlib_modules
465 stdlib = name in stdlib_modules
465
466
466 # Ignore sorting rules on imports inside blocks.
467 # Ignore sorting rules on imports inside blocks.
467 if node.col_offset == root_col_offset:
468 if node.col_offset == root_col_offset:
468 if lastname and name < lastname and laststdlib == stdlib:
469 if lastname and name < lastname and laststdlib == stdlib:
469 yield msg(
470 yield msg(
470 'imports not lexically sorted: %s < %s', name, lastname
471 'imports not lexically sorted: %s < %s', name, lastname
471 )
472 )
472
473
473 lastname = name
474 lastname = name
474 laststdlib = stdlib
475 laststdlib = stdlib
475
476
476 # stdlib imports should be before local imports.
477 # stdlib imports should be before local imports.
477 if stdlib and seenlocal and node.col_offset == root_col_offset:
478 if stdlib and seenlocal and node.col_offset == root_col_offset:
478 yield msg(
479 yield msg(
479 'stdlib import "%s" follows local import: %s',
480 'stdlib import "%s" follows local import: %s',
480 name,
481 name,
481 seenlocal,
482 seenlocal,
482 )
483 )
483
484
484 if not stdlib:
485 if not stdlib:
485 seenlocal = name
486 seenlocal = name
486
487
487 # Import of sibling modules should use relative imports.
488 # Import of sibling modules should use relative imports.
488 topname = name.split('.')[0]
489 topname = name.split('.')[0]
489 if topname == topmodule:
490 if topname == topmodule:
490 yield msg('import should be relative: %s', name)
491 yield msg('import should be relative: %s', name)
491
492
492 if name in requirealias and asname != requirealias[name]:
493 if name in requirealias and asname != requirealias[name]:
493 yield msg(
494 yield msg(
494 '%s module must be "as" aliased to %s',
495 '%s module must be "as" aliased to %s',
495 name,
496 name,
496 requirealias[name],
497 requirealias[name],
497 )
498 )
498
499
499 elif isinstance(node, ast.ImportFrom):
500 elif isinstance(node, ast.ImportFrom):
500 # Resolve the full imported module name.
501 # Resolve the full imported module name.
501 if node.level > 0:
502 if node.level > 0:
502 fullname = '.'.join(module.split('.')[: -node.level])
503 fullname = '.'.join(module.split('.')[: -node.level])
503 if node.module:
504 if node.module:
504 fullname += '.%s' % node.module
505 fullname += '.%s' % node.module
505 else:
506 else:
506 assert node.module
507 assert node.module
507 fullname = node.module
508 fullname = node.module
508
509
509 topname = fullname.split('.')[0]
510 topname = fullname.split('.')[0]
510 if topname == topmodule:
511 if topname == topmodule:
511 yield msg('import should be relative: %s', fullname)
512 yield msg('import should be relative: %s', fullname)
512
513
513 # __future__ is special since it needs to come first and use
514 # __future__ is special since it needs to come first and use
514 # symbol import.
515 # symbol import.
515 if fullname != '__future__':
516 if fullname != '__future__':
516 if not fullname or (
517 if not fullname or (
517 fullname in stdlib_modules
518 fullname in stdlib_modules
518 # allow standard 'from typing import ...' style
519 # allow standard 'from typing import ...' style
519 and fullname.startswith('.')
520 and fullname.startswith('.')
520 and fullname not in localmods
521 and fullname not in localmods
521 and fullname + '.__init__' not in localmods
522 and fullname + '.__init__' not in localmods
522 ):
523 ):
523 yield msg('relative import of stdlib module')
524 yield msg('relative import of stdlib module')
524 else:
525 else:
525 seenlocal = fullname
526 seenlocal = fullname
526
527
527 # Direct symbol import is only allowed from certain modules and
528 # Direct symbol import is only allowed from certain modules and
528 # must occur before non-symbol imports.
529 # must occur before non-symbol imports.
529 found = fromlocal(node.module, node.level)
530 found = fromlocal(node.module, node.level)
530 if found and found[2]: # node.module is a package
531 if found and found[2]: # node.module is a package
531 prefix = found[0] + '.'
532 prefix = found[0] + '.'
532 symbols = (
533 symbols = (
533 n.name for n in node.names if not fromlocal(prefix + n.name)
534 n.name for n in node.names if not fromlocal(prefix + n.name)
534 )
535 )
535 else:
536 else:
536 symbols = (n.name for n in node.names)
537 symbols = (n.name for n in node.names)
537 symbols = [sym for sym in symbols if sym not in directsymbols]
538 symbols = [sym for sym in symbols if sym not in directsymbols]
538 if node.module and node.col_offset == root_col_offset:
539 if node.module and node.col_offset == root_col_offset:
539 if symbols and fullname not in allowsymbolimports:
540 if symbols and fullname not in allowsymbolimports:
540 yield msg(
541 yield msg(
541 'direct symbol import %s from %s',
542 'direct symbol import %s from %s',
542 ', '.join(symbols),
543 ', '.join(symbols),
543 fullname,
544 fullname,
544 )
545 )
545
546
546 if symbols and seennonsymbollocal:
547 if symbols and seennonsymbollocal:
547 yield msg(
548 yield msg(
548 'symbol import follows non-symbol import: %s', fullname
549 'symbol import follows non-symbol import: %s', fullname
549 )
550 )
550 if not symbols and fullname not in stdlib_modules:
551 if not symbols and fullname not in stdlib_modules:
551 seennonsymbollocal = True
552 seennonsymbollocal = True
552
553
553 if not node.module:
554 if not node.module:
554 assert node.level
555 assert node.level
555
556
556 # Only allow 1 group per level.
557 # Only allow 1 group per level.
557 if (
558 if (
558 node.level in seenlevels
559 node.level in seenlevels
559 and node.col_offset == root_col_offset
560 and node.col_offset == root_col_offset
560 ):
561 ):
561 yield msg(
562 yield msg(
562 'multiple "from %s import" statements', '.' * node.level
563 'multiple "from %s import" statements', '.' * node.level
563 )
564 )
564
565
565 # Higher-level groups come before lower-level groups.
566 # Higher-level groups come before lower-level groups.
566 if any(node.level > l for l in seenlevels):
567 if any(node.level > l for l in seenlevels):
567 yield msg(
568 yield msg(
568 'higher-level import should come first: %s', fullname
569 'higher-level import should come first: %s', fullname
569 )
570 )
570
571
571 seenlevels.add(node.level)
572 seenlevels.add(node.level)
572
573
573 # Entries in "from .X import ( ... )" lists must be lexically
574 # Entries in "from .X import ( ... )" lists must be lexically
574 # sorted.
575 # sorted.
575 lastentryname = None
576 lastentryname = None
576
577
577 for n in node.names:
578 for n in node.names:
578 if lastentryname and n.name < lastentryname:
579 if lastentryname and n.name < lastentryname:
579 yield msg(
580 yield msg(
580 'imports from %s not lexically sorted: %s < %s',
581 'imports from %s not lexically sorted: %s < %s',
581 fullname,
582 fullname,
582 n.name,
583 n.name,
583 lastentryname,
584 lastentryname,
584 )
585 )
585
586
586 lastentryname = n.name
587 lastentryname = n.name
587
588
588 if n.name in requirealias and n.asname != requirealias[n.name]:
589 if n.name in requirealias and n.asname != requirealias[n.name]:
589 yield msg(
590 yield msg(
590 '%s from %s must be "as" aliased to %s',
591 '%s from %s must be "as" aliased to %s',
591 n.name,
592 n.name,
592 fullname,
593 fullname,
593 requirealias[n.name],
594 requirealias[n.name],
594 )
595 )
595
596
596
597
class CircularImport(Exception):
    """Raised by checkmod() when an import cycle back to the start is found.

    args[0] is the list of module names forming the cycle.
    """

    pass
599
600
600
601
def checkmod(mod, imports):
    """Breadth-first walk of the import graph starting at ``mod``.

    ``imports`` maps a module name to the list of modules it imports.
    Raises CircularImport (carrying the offending path) if ``mod`` is
    reachable from itself; returns None otherwise.
    """
    # Shallowest depth at which each module has been reached; used to
    # prune paths that cannot improve on an already-seen route.
    best_depth = {}
    queue = [[mod]]
    while queue:
        chain = queue.pop(0)
        for dep in sorted(imports.get(chain[-1], [])):
            if len(chain) < best_depth.get(dep, 1000):
                best_depth[dep] = len(chain)
                if dep in chain:
                    if dep == chain[0]:
                        # Cycle closes back on the starting module.
                        raise CircularImport(chain)
                    continue
                queue.append(chain + [dep])
614
615
615
616
def rotatecycle(cycle):
    """arrange a cycle so that the lexicographically first module listed first

    >>> rotatecycle(['foo', 'bar'])
    ['bar', 'foo', 'bar']
    """
    # Rotate so the smallest name leads, then close the loop by
    # repeating that name at the end.
    start = cycle.index(min(cycle))
    rotated = cycle[start:] + cycle[:start]
    return rotated + rotated[:1]
625
626
626
627
def find_cycles(imports):
    """Find cycles in an already-loaded import graph.

    All module names recorded in `imports` should be absolute one.

    >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
    ...            'top.bar': ['top.baz', 'sys'],
    ...            'top.baz': ['top.foo'],
    ...            'top.qux': ['top.foo']}
    >>> print('\\n'.join(sorted(find_cycles(imports))))
    top.bar -> top.baz -> top.foo -> top.bar
    top.foo -> top.qux -> top.foo
    """
    # Run the cycle check from every module; each CircularImport carries
    # the path that closed the loop, rendered canonically via rotatecycle.
    cycles = set()
    for mod in sorted(imports):
        try:
            checkmod(mod, imports)
        except CircularImport as exc:
            cycles.add(" -> ".join(rotatecycle(exc.args[0])))
    return cycles
648
649
649
650
650 def _cycle_sortkey(c):
651 def _cycle_sortkey(c):
651 return len(c), c
652 return len(c), c
652
653
653
654
def embedded(f, modname, src):
    """Extract embedded python code

    >>> def _forcestr(thing):
    ...     if not isinstance(thing, str):
    ...         return thing.decode('ascii')
    ...     return thing
    >>> def test(fn, lines):
    ...     for s, m, f, l in embedded(fn, b"example", lines):
    ...         print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
    ...         print(repr(_forcestr(s)))
    >>> lines = [
    ...   'comment',
    ...   '  >>> from __future__ import print_function',
    ...   "  >>> ' multiline",
    ...   "  ... string'",
    ...   '  ',
    ...   'comment',
    ...   '  $ cat > foo.py <<EOF',
    ...   '  > from __future__ import print_function',
    ...   '  > EOF',
    ... ]
    >>> test(b"example.t", lines)
    example[2] doctest.py 1
    "from __future__ import print_function\\n' multiline\\nstring'\\n\\n"
    example[8] foo.py 7
    'from __future__ import print_function\\n'
    """
    # Parse errors collected by pyembedded() are accumulated here but are
    # not surfaced by this generator.
    errors = []
    for name, starts, ends, code in testparseutil.pyembedded(f, src, errors):
        if not name:
            # use 'doctest.py', in order to make already existing
            # doctest above pass instantly
            name = 'doctest.py'
        # "starts" is "line number" (1-origin), but embedded() is
        # expected to return "line offset" (0-origin). Therefore, this
        # yields "starts - 1".
        if not isinstance(modname, str):
            modname = modname.decode('utf8')
        yield code, "%s[%d]" % (modname, starts), name, starts - 1
694
695
695
696
def sources(f, modname):
    """Yields possibly multiple sources from a filepath

    input: filepath, modulename
    yields: script(string), modulename, filepath, linenumber

    For embedded scripts, the modulename and filepath will be different
    from the function arguments. linenumber is an offset relative to
    the input file.
    """
    yielded_whole_file = False
    if not f.endswith('.t'):
        # Plain python file: the whole content is one source.
        with open(f, 'rb') as src:
            yield src.read(), modname, f, 0
        yielded_whole_file = True
    if yielded_whole_file or f.endswith('.t'):
        # Strictly speaking we should sniff for the magic header that denotes
        # Python source file encoding. But in reality we don't use anything
        # other than ASCII (mainly) and UTF-8 (in a few exceptions), so
        # simplicity is fine.
        with io.open(f, 'r', encoding='utf-8') as src:
            for script, embedded_name, t, line in embedded(f, modname, src):
                yield script, embedded_name.encode('utf8'), t, line
719
720
720
721
def main(argv):
    """Check import conventions and import cycles for the given files.

    ``argv`` follows sys.argv conventions: argv[0] is the program name,
    the rest are file paths. A single '-' argument means "read the list
    of paths from stdin, one per line".

    Returns a truthy value (used as the process exit status) when any
    convention violation or import cycle was reported.
    """
    if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
        # Bug fix: the '%s' placeholder was previously printed literally;
        # substitute the program name (guarded in case argv is empty).
        prog = argv[0] if argv else 'import-checker.py'
        print('Usage: %s {-|file [file] [file] ...}' % prog)
        return 1
    if argv[1] == '-':
        # Read the file list from stdin, one path per line.
        argv = argv[:1]
        argv.extend(l.rstrip() for l in sys.stdin.readlines())
    localmodpaths = {}
    used_imports = {}
    any_errors = False
    for source_path in argv[1:]:
        modname = dotted_name_of_path(source_path)
        localmodpaths[modname] = source_path
    localmods = populateextmods(localmodpaths)
    for localmodname, source_path in sorted(localmodpaths.items()):
        if not isinstance(localmodname, bytes):
            # This is only safe because all hg's files are ascii
            localmodname = localmodname.encode('ascii')
        for src, modname, name, line in sources(source_path, localmodname):
            try:
                used_imports[modname] = sorted(
                    imported_modules(
                        src, modname, name, localmods, ignore_nested=True
                    )
                )
                for error, lineno in verify_import_convention(
                    modname, src, localmods
                ):
                    any_errors = True
                    print('%s:%d: %s' % (source_path, lineno + line, error))
            except SyntaxError as e:
                # NOTE(review): syntax errors are printed but do not set
                # any_errors, so they do not affect the exit status --
                # confirm whether that is intentional.
                print(
                    '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e)
                )
    cycles = find_cycles(used_imports)
    if cycles:
        firstmods = set()
        for c in sorted(cycles, key=_cycle_sortkey):
            first = c.split()[0]
            # As a rough cut, ignore any cycle that starts with the
            # same module as some other cycle. Otherwise we see lots
            # of cycles that are effectively duplicates.
            if first in firstmods:
                continue
            print('Import cycle:', c)
            firstmods.add(first)
            any_errors = True
    return any_errors != 0
769
770
770
771
if __name__ == '__main__':
    # Exit status 1 when any violation or cycle was reported, 0 otherwise.
    sys.exit(int(main(sys.argv)))
@@ -1,2299 +1,2290 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17
17
18 import os
18 import os
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial.node import (
21 from mercurial.node import (
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirrev,
24 wdirrev,
25 )
25 )
26 from mercurial.pycompat import open
26 from mercurial.pycompat import open
27 from mercurial.thirdparty.jaraco.collections import Projection
27 from mercurial import (
28 from mercurial import (
28 bookmarks,
29 bookmarks,
29 cmdutil,
30 cmdutil,
30 commands,
31 commands,
31 copies,
32 copies,
32 destutil,
33 destutil,
33 error,
34 error,
34 extensions,
35 extensions,
35 logcmdutil,
36 logcmdutil,
36 merge as mergemod,
37 merge as mergemod,
37 mergestate as mergestatemod,
38 mergestate as mergestatemod,
38 mergeutil,
39 mergeutil,
39 obsolete,
40 obsolete,
40 obsutil,
41 obsutil,
41 patch,
42 patch,
42 phases,
43 phases,
43 pycompat,
44 pycompat,
44 registrar,
45 registrar,
45 repair,
46 repair,
46 revset,
47 revset,
47 revsetlang,
48 revsetlang,
48 rewriteutil,
49 rewriteutil,
49 scmutil,
50 scmutil,
50 smartset,
51 smartset,
51 state as statemod,
52 state as statemod,
52 util,
53 util,
53 )
54 )
54
55
56
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased
revtodo = -1
# On-disk (rebasestate) string form of revtodo.
revtodostr = b'-1'

# legacy revstates no longer needed in current code
# -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
legacystates = {b'-2', b'-3', b'-4', b'-5'}

# Command table populated by the @command decorator.
cmdtable = {}
command = registrar.command(cmdtable)

# Config table populated by the configitem() calls below.
configtable = {}
configitem = registrar.configitem(configtable)
configitem(
    b'devel',
    b'rebase.force-in-memory-merge',
    default=False,
)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
81
83
82
84
def _nothingtorebase():
    # Exit status returned when the requested rebase set is empty.
    return 1
85
87
86
88
def retained_extras():
    """
    Yield the names of the extras to be retained.
    """
    # Extras written by graft.
    for key in (b'source', b'intermediate-source'):
        yield key
94
96
95
97
96 def _project(orig, names):
97 """Project a subset of names from orig."""
98 names_saved = tuple(names)
99 values = (orig.get(name, None) for name in names_saved)
100 return {
101 name: value
102 for name, value in zip(names_saved, values)
103 if value is not None
104 }
105
106
def _save_extras(ctx, extra):
    # Copy the retained extras (see retained_extras()) of the original
    # changeset ``ctx`` into ``extra``, the extras dict of the rebased
    # changeset. Projection only includes keys present in ctx.extra().
    extra.update(Projection(retained_extras(), ctx.extra()))
109
100
110
101
def _savebranch(ctx, extra):
    # Preserve the original changeset's named branch in the rebased one.
    extra[b'branch'] = ctx.branch()
113
104
114
105
def _destrebase(repo, sourceset, destspace=None):
    """small wrapper around destmerge to pass the right extra args

    Please wrap destutil.destmerge instead."""
    merge_args = {
        'action': b'rebase',
        'sourceset': sourceset,
        'onheadcheck': False,
        'destspace': destspace,
    }
    return destutil.destmerge(repo, **merge_args)
126
117
127
118
# Registrar collecting the revset predicates defined below.
revsetpredicate = registrar.revsetpredicate()
129
120
130
121
@revsetpredicate(b'_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``
    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # i18n: "_rebasedefaultdest" is a keyword
    if x is None:
        sourceset = None
    else:
        sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
    destination = _destrebase(repo, sourceset)
    return subset & smartset.baseset([destination])
143
134
144
135
@revsetpredicate(b'_destautoorphanrebase')
def _revsetdestautoorphanrebase(repo, subset, x):
    # ``_destautoorphanrebase()``
    # automatic rebase destination for a single orphan revision.
    unfiltered = repo.unfiltered()
    obsoleted = unfiltered.revs(b'obsolete()')

    src = revset.getset(repo, subset, x).first()

    if not src or src in obsoleted:
        # Empty src or already obsoleted - Do not return a destination
        return smartset.baseset()
    dests = destutil.orphanpossibledestination(repo, src)
    if len(dests) > 1:
        raise error.StateError(
            _(b"ambiguous automatic rebase: %r could end up on any of %r")
            % (src, dests)
        )
    # We have zero or one destination, so we can just return here.
    return smartset.baseset(dests)
166
157
167
158
def _ctxdesc(ctx):
    """short description for a context"""
    ui = ctx.repo().ui
    return cmdutil.format_changeset_summary(ui, ctx, command=b'rebase')
173
164
174
165
175 class rebaseruntime:
166 class rebaseruntime:
176 """This class is a container for rebase runtime state"""
167 """This class is a container for rebase runtime state"""
177
168
178 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
169 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
179 if opts is None:
170 if opts is None:
180 opts = {}
171 opts = {}
181
172
182 # prepared: whether we have rebasestate prepared or not. Currently it
173 # prepared: whether we have rebasestate prepared or not. Currently it
183 # decides whether "self.repo" is unfiltered or not.
174 # decides whether "self.repo" is unfiltered or not.
184 # The rebasestate has explicit hash to hash instructions not depending
175 # The rebasestate has explicit hash to hash instructions not depending
185 # on visibility. If rebasestate exists (in-memory or on-disk), use
176 # on visibility. If rebasestate exists (in-memory or on-disk), use
186 # unfiltered repo to avoid visibility issues.
177 # unfiltered repo to avoid visibility issues.
187 # Before knowing rebasestate (i.e. when starting a new rebase (not
178 # Before knowing rebasestate (i.e. when starting a new rebase (not
188 # --continue or --abort)), the original repo should be used so
179 # --continue or --abort)), the original repo should be used so
189 # visibility-dependent revsets are correct.
180 # visibility-dependent revsets are correct.
190 self.prepared = False
181 self.prepared = False
191 self.resume = False
182 self.resume = False
192 self._repo = repo
183 self._repo = repo
193
184
194 self.ui = ui
185 self.ui = ui
195 self.opts = opts
186 self.opts = opts
196 self.originalwd = None
187 self.originalwd = None
197 self.external = nullrev
188 self.external = nullrev
198 # Mapping between the old revision id and either what is the new rebased
189 # Mapping between the old revision id and either what is the new rebased
199 # revision or what needs to be done with the old revision. The state
190 # revision or what needs to be done with the old revision. The state
200 # dict will be what contains most of the rebase progress state.
191 # dict will be what contains most of the rebase progress state.
201 self.state = {}
192 self.state = {}
202 self.activebookmark = None
193 self.activebookmark = None
203 self.destmap = {}
194 self.destmap = {}
204 self.skipped = set()
195 self.skipped = set()
205
196
206 self.collapsef = opts.get('collapse', False)
197 self.collapsef = opts.get('collapse', False)
207 self.collapsemsg = cmdutil.logmessage(ui, pycompat.byteskwargs(opts))
198 self.collapsemsg = cmdutil.logmessage(ui, pycompat.byteskwargs(opts))
208 self.date = opts.get('date', None)
199 self.date = opts.get('date', None)
209
200
210 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
201 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
211 self.extrafns = [_save_extras]
202 self.extrafns = [_save_extras]
212 if e:
203 if e:
213 self.extrafns = [e]
204 self.extrafns = [e]
214
205
215 self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
206 self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
216 self.keepf = opts.get('keep', False)
207 self.keepf = opts.get('keep', False)
217 self.keepbranchesf = opts.get('keepbranches', False)
208 self.keepbranchesf = opts.get('keepbranches', False)
218 self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
209 self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
219 repo.ui, b'rebase'
210 repo.ui, b'rebase'
220 )
211 )
221 self.obsolete_with_successor_in_destination = {}
212 self.obsolete_with_successor_in_destination = {}
222 self.obsolete_with_successor_in_rebase_set = set()
213 self.obsolete_with_successor_in_rebase_set = set()
223 self.inmemory = inmemory
214 self.inmemory = inmemory
224 self.dryrun = dryrun
215 self.dryrun = dryrun
225 self.stateobj = statemod.cmdstate(repo, b'rebasestate')
216 self.stateobj = statemod.cmdstate(repo, b'rebasestate')
226
217
227 @property
218 @property
228 def repo(self):
219 def repo(self):
229 if self.prepared:
220 if self.prepared:
230 return self._repo.unfiltered()
221 return self._repo.unfiltered()
231 else:
222 else:
232 return self._repo
223 return self._repo
233
224
234 def storestatus(self, tr=None):
225 def storestatus(self, tr=None):
235 """Store the current status to allow recovery"""
226 """Store the current status to allow recovery"""
236 if tr:
227 if tr:
237 tr.addfilegenerator(
228 tr.addfilegenerator(
238 b'rebasestate',
229 b'rebasestate',
239 (b'rebasestate',),
230 (b'rebasestate',),
240 self._writestatus,
231 self._writestatus,
241 location=b'plain',
232 location=b'plain',
242 )
233 )
243 else:
234 else:
244 with self.repo.vfs(b"rebasestate", b"w") as f:
235 with self.repo.vfs(b"rebasestate", b"w") as f:
245 self._writestatus(f)
236 self._writestatus(f)
246
237
247 def _writestatus(self, f):
238 def _writestatus(self, f):
248 repo = self.repo
239 repo = self.repo
249 assert repo.filtername is None
240 assert repo.filtername is None
250 f.write(repo[self.originalwd].hex() + b'\n')
241 f.write(repo[self.originalwd].hex() + b'\n')
251 # was "dest". we now write dest per src root below.
242 # was "dest". we now write dest per src root below.
252 f.write(b'\n')
243 f.write(b'\n')
253 f.write(repo[self.external].hex() + b'\n')
244 f.write(repo[self.external].hex() + b'\n')
254 f.write(b'%d\n' % int(self.collapsef))
245 f.write(b'%d\n' % int(self.collapsef))
255 f.write(b'%d\n' % int(self.keepf))
246 f.write(b'%d\n' % int(self.keepf))
256 f.write(b'%d\n' % int(self.keepbranchesf))
247 f.write(b'%d\n' % int(self.keepbranchesf))
257 f.write(b'%s\n' % (self.activebookmark or b''))
248 f.write(b'%s\n' % (self.activebookmark or b''))
258 destmap = self.destmap
249 destmap = self.destmap
259 for d, v in self.state.items():
250 for d, v in self.state.items():
260 oldrev = repo[d].hex()
251 oldrev = repo[d].hex()
261 if v >= 0:
252 if v >= 0:
262 newrev = repo[v].hex()
253 newrev = repo[v].hex()
263 else:
254 else:
264 newrev = b"%d" % v
255 newrev = b"%d" % v
265 destnode = repo[destmap[d]].hex()
256 destnode = repo[destmap[d]].hex()
266 f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
257 f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
267 repo.ui.debug(b'rebase status stored\n')
258 repo.ui.debug(b'rebase status stored\n')
268
259
269 def restorestatus(self):
260 def restorestatus(self):
270 """Restore a previously stored status"""
261 """Restore a previously stored status"""
271 if not self.stateobj.exists():
262 if not self.stateobj.exists():
272 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
263 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
273
264
274 data = self._read()
265 data = self._read()
275 self.repo.ui.debug(b'rebase status resumed\n')
266 self.repo.ui.debug(b'rebase status resumed\n')
276
267
277 self.originalwd = data[b'originalwd']
268 self.originalwd = data[b'originalwd']
278 self.destmap = data[b'destmap']
269 self.destmap = data[b'destmap']
279 self.state = data[b'state']
270 self.state = data[b'state']
280 self.skipped = data[b'skipped']
271 self.skipped = data[b'skipped']
281 self.collapsef = data[b'collapse']
272 self.collapsef = data[b'collapse']
282 self.keepf = data[b'keep']
273 self.keepf = data[b'keep']
283 self.keepbranchesf = data[b'keepbranches']
274 self.keepbranchesf = data[b'keepbranches']
284 self.external = data[b'external']
275 self.external = data[b'external']
285 self.activebookmark = data[b'activebookmark']
276 self.activebookmark = data[b'activebookmark']
286
277
287 def _read(self):
278 def _read(self):
288 self.prepared = True
279 self.prepared = True
289 repo = self.repo
280 repo = self.repo
290 assert repo.filtername is None
281 assert repo.filtername is None
291 data = {
282 data = {
292 b'keepbranches': None,
283 b'keepbranches': None,
293 b'collapse': None,
284 b'collapse': None,
294 b'activebookmark': None,
285 b'activebookmark': None,
295 b'external': nullrev,
286 b'external': nullrev,
296 b'keep': None,
287 b'keep': None,
297 b'originalwd': None,
288 b'originalwd': None,
298 }
289 }
299 legacydest = None
290 legacydest = None
300 state = {}
291 state = {}
301 destmap = {}
292 destmap = {}
302
293
303 if True:
294 if True:
304 f = repo.vfs(b"rebasestate")
295 f = repo.vfs(b"rebasestate")
305 for i, l in enumerate(f.read().splitlines()):
296 for i, l in enumerate(f.read().splitlines()):
306 if i == 0:
297 if i == 0:
307 data[b'originalwd'] = repo[l].rev()
298 data[b'originalwd'] = repo[l].rev()
308 elif i == 1:
299 elif i == 1:
309 # this line should be empty in newer version. but legacy
300 # this line should be empty in newer version. but legacy
310 # clients may still use it
301 # clients may still use it
311 if l:
302 if l:
312 legacydest = repo[l].rev()
303 legacydest = repo[l].rev()
313 elif i == 2:
304 elif i == 2:
314 data[b'external'] = repo[l].rev()
305 data[b'external'] = repo[l].rev()
315 elif i == 3:
306 elif i == 3:
316 data[b'collapse'] = bool(int(l))
307 data[b'collapse'] = bool(int(l))
317 elif i == 4:
308 elif i == 4:
318 data[b'keep'] = bool(int(l))
309 data[b'keep'] = bool(int(l))
319 elif i == 5:
310 elif i == 5:
320 data[b'keepbranches'] = bool(int(l))
311 data[b'keepbranches'] = bool(int(l))
321 elif i == 6 and not (len(l) == 81 and b':' in l):
312 elif i == 6 and not (len(l) == 81 and b':' in l):
322 # line 6 is a recent addition, so for backwards
313 # line 6 is a recent addition, so for backwards
323 # compatibility check that the line doesn't look like the
314 # compatibility check that the line doesn't look like the
324 # oldrev:newrev lines
315 # oldrev:newrev lines
325 data[b'activebookmark'] = l
316 data[b'activebookmark'] = l
326 else:
317 else:
327 args = l.split(b':')
318 args = l.split(b':')
328 oldrev = repo[args[0]].rev()
319 oldrev = repo[args[0]].rev()
329 newrev = args[1]
320 newrev = args[1]
330 if newrev in legacystates:
321 if newrev in legacystates:
331 continue
322 continue
332 if len(args) > 2:
323 if len(args) > 2:
333 destrev = repo[args[2]].rev()
324 destrev = repo[args[2]].rev()
334 else:
325 else:
335 destrev = legacydest
326 destrev = legacydest
336 destmap[oldrev] = destrev
327 destmap[oldrev] = destrev
337 if newrev == revtodostr:
328 if newrev == revtodostr:
338 state[oldrev] = revtodo
329 state[oldrev] = revtodo
339 # Legacy compat special case
330 # Legacy compat special case
340 else:
331 else:
341 state[oldrev] = repo[newrev].rev()
332 state[oldrev] = repo[newrev].rev()
342
333
343 if data[b'keepbranches'] is None:
334 if data[b'keepbranches'] is None:
344 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
335 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
345
336
346 data[b'destmap'] = destmap
337 data[b'destmap'] = destmap
347 data[b'state'] = state
338 data[b'state'] = state
348 skipped = set()
339 skipped = set()
349 # recompute the set of skipped revs
340 # recompute the set of skipped revs
350 if not data[b'collapse']:
341 if not data[b'collapse']:
351 seen = set(destmap.values())
342 seen = set(destmap.values())
352 for old, new in sorted(state.items()):
343 for old, new in sorted(state.items()):
353 if new != revtodo and new in seen:
344 if new != revtodo and new in seen:
354 skipped.add(old)
345 skipped.add(old)
355 seen.add(new)
346 seen.add(new)
356 data[b'skipped'] = skipped
347 data[b'skipped'] = skipped
357 repo.ui.debug(
348 repo.ui.debug(
358 b'computed skipped revs: %s\n'
349 b'computed skipped revs: %s\n'
359 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
350 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
360 )
351 )
361
352
362 return data
353 return data
363
354
364 def _handleskippingobsolete(self):
355 def _handleskippingobsolete(self):
365 """Compute structures necessary for skipping obsolete revisions"""
356 """Compute structures necessary for skipping obsolete revisions"""
366 if self.keepf:
357 if self.keepf:
367 return
358 return
368 if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
359 if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
369 return
360 return
370 obsoleteset = {r for r in self.state if self.repo[r].obsolete()}
361 obsoleteset = {r for r in self.state if self.repo[r].obsolete()}
371 (
362 (
372 self.obsolete_with_successor_in_destination,
363 self.obsolete_with_successor_in_destination,
373 self.obsolete_with_successor_in_rebase_set,
364 self.obsolete_with_successor_in_rebase_set,
374 ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap)
365 ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap)
375 skippedset = set(self.obsolete_with_successor_in_destination)
366 skippedset = set(self.obsolete_with_successor_in_destination)
376 skippedset.update(self.obsolete_with_successor_in_rebase_set)
367 skippedset.update(self.obsolete_with_successor_in_rebase_set)
377 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
368 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
378 if obsolete.isenabled(self.repo, obsolete.allowdivergenceopt):
369 if obsolete.isenabled(self.repo, obsolete.allowdivergenceopt):
379 self.obsolete_with_successor_in_rebase_set = set()
370 self.obsolete_with_successor_in_rebase_set = set()
380 else:
371 else:
381 for rev in self.repo.revs(
372 for rev in self.repo.revs(
382 b'descendants(%ld) and not %ld',
373 b'descendants(%ld) and not %ld',
383 self.obsolete_with_successor_in_rebase_set,
374 self.obsolete_with_successor_in_rebase_set,
384 self.obsolete_with_successor_in_rebase_set,
375 self.obsolete_with_successor_in_rebase_set,
385 ):
376 ):
386 self.state.pop(rev, None)
377 self.state.pop(rev, None)
387 self.destmap.pop(rev, None)
378 self.destmap.pop(rev, None)
388
379
389 def _prepareabortorcontinue(
380 def _prepareabortorcontinue(
390 self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
381 self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
391 ):
382 ):
392 self.resume = True
383 self.resume = True
393 try:
384 try:
394 self.restorestatus()
385 self.restorestatus()
395 # Calculate self.obsolete_* sets
386 # Calculate self.obsolete_* sets
396 self._handleskippingobsolete()
387 self._handleskippingobsolete()
397 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
388 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
398 except error.RepoLookupError:
389 except error.RepoLookupError:
399 if isabort:
390 if isabort:
400 clearstatus(self.repo)
391 clearstatus(self.repo)
401 clearcollapsemsg(self.repo)
392 clearcollapsemsg(self.repo)
402 self.repo.ui.warn(
393 self.repo.ui.warn(
403 _(
394 _(
404 b'rebase aborted (no revision is removed,'
395 b'rebase aborted (no revision is removed,'
405 b' only broken state is cleared)\n'
396 b' only broken state is cleared)\n'
406 )
397 )
407 )
398 )
408 return 0
399 return 0
409 else:
400 else:
410 msg = _(b'cannot continue inconsistent rebase')
401 msg = _(b'cannot continue inconsistent rebase')
411 hint = _(b'use "hg rebase --abort" to clear broken state')
402 hint = _(b'use "hg rebase --abort" to clear broken state')
412 raise error.Abort(msg, hint=hint)
403 raise error.Abort(msg, hint=hint)
413
404
414 if isabort:
405 if isabort:
415 backup = backup and self.backupf
406 backup = backup and self.backupf
416 return self._abort(
407 return self._abort(
417 backup=backup,
408 backup=backup,
418 suppwarns=suppwarns,
409 suppwarns=suppwarns,
419 dryrun=dryrun,
410 dryrun=dryrun,
420 confirm=confirm,
411 confirm=confirm,
421 )
412 )
422
413
423 def _preparenewrebase(self, destmap):
414 def _preparenewrebase(self, destmap):
424 if not destmap:
415 if not destmap:
425 return _nothingtorebase()
416 return _nothingtorebase()
426
417
427 result = buildstate(self.repo, destmap, self.collapsef)
418 result = buildstate(self.repo, destmap, self.collapsef)
428
419
429 if not result:
420 if not result:
430 # Empty state built, nothing to rebase
421 # Empty state built, nothing to rebase
431 self.ui.status(_(b'nothing to rebase\n'))
422 self.ui.status(_(b'nothing to rebase\n'))
432 return _nothingtorebase()
423 return _nothingtorebase()
433
424
434 (self.originalwd, self.destmap, self.state) = result
425 (self.originalwd, self.destmap, self.state) = result
435 if self.collapsef:
426 if self.collapsef:
436 dests = set(self.destmap.values())
427 dests = set(self.destmap.values())
437 if len(dests) != 1:
428 if len(dests) != 1:
438 raise error.InputError(
429 raise error.InputError(
439 _(b'--collapse does not work with multiple destinations')
430 _(b'--collapse does not work with multiple destinations')
440 )
431 )
441 destrev = next(iter(dests))
432 destrev = next(iter(dests))
442 destancestors = self.repo.changelog.ancestors(
433 destancestors = self.repo.changelog.ancestors(
443 [destrev], inclusive=True
434 [destrev], inclusive=True
444 )
435 )
445 self.external = externalparent(self.repo, self.state, destancestors)
436 self.external = externalparent(self.repo, self.state, destancestors)
446
437
447 for destrev in sorted(set(destmap.values())):
438 for destrev in sorted(set(destmap.values())):
448 dest = self.repo[destrev]
439 dest = self.repo[destrev]
449 if dest.closesbranch() and not self.keepbranchesf:
440 if dest.closesbranch() and not self.keepbranchesf:
450 self.ui.status(_(b'reopening closed branch head %s\n') % dest)
441 self.ui.status(_(b'reopening closed branch head %s\n') % dest)
451
442
452 # Calculate self.obsolete_* sets
443 # Calculate self.obsolete_* sets
453 self._handleskippingobsolete()
444 self._handleskippingobsolete()
454
445
455 if not self.keepf:
446 if not self.keepf:
456 rebaseset = set(destmap.keys())
447 rebaseset = set(destmap.keys())
457 rebaseset -= set(self.obsolete_with_successor_in_destination)
448 rebaseset -= set(self.obsolete_with_successor_in_destination)
458 rebaseset -= self.obsolete_with_successor_in_rebase_set
449 rebaseset -= self.obsolete_with_successor_in_rebase_set
459 # We have our own divergence-checking in the rebase extension
450 # We have our own divergence-checking in the rebase extension
460 overrides = {}
451 overrides = {}
461 if obsolete.isenabled(self.repo, obsolete.createmarkersopt):
452 if obsolete.isenabled(self.repo, obsolete.createmarkersopt):
462 overrides = {
453 overrides = {
463 (b'experimental', b'evolution.allowdivergence'): b'true'
454 (b'experimental', b'evolution.allowdivergence'): b'true'
464 }
455 }
465 try:
456 try:
466 with self.ui.configoverride(overrides):
457 with self.ui.configoverride(overrides):
467 rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
458 rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
468 except error.Abort as e:
459 except error.Abort as e:
469 if e.hint is None:
460 if e.hint is None:
470 e.hint = _(b'use --keep to keep original changesets')
461 e.hint = _(b'use --keep to keep original changesets')
471 raise e
462 raise e
472
463
473 self.prepared = True
464 self.prepared = True
474
465
475 def _assignworkingcopy(self):
466 def _assignworkingcopy(self):
476 if self.inmemory:
467 if self.inmemory:
477 from mercurial.context import overlayworkingctx
468 from mercurial.context import overlayworkingctx
478
469
479 self.wctx = overlayworkingctx(self.repo)
470 self.wctx = overlayworkingctx(self.repo)
480 self.repo.ui.debug(b"rebasing in memory\n")
471 self.repo.ui.debug(b"rebasing in memory\n")
481 else:
472 else:
482 self.wctx = self.repo[None]
473 self.wctx = self.repo[None]
483 self.repo.ui.debug(b"rebasing on disk\n")
474 self.repo.ui.debug(b"rebasing on disk\n")
484 self.repo.ui.log(
475 self.repo.ui.log(
485 b"rebase",
476 b"rebase",
486 b"using in-memory rebase: %r\n",
477 b"using in-memory rebase: %r\n",
487 self.inmemory,
478 self.inmemory,
488 rebase_imm_used=self.inmemory,
479 rebase_imm_used=self.inmemory,
489 )
480 )
490
481
491 def _performrebase(self, tr):
482 def _performrebase(self, tr):
492 self._assignworkingcopy()
483 self._assignworkingcopy()
493 repo, ui = self.repo, self.ui
484 repo, ui = self.repo, self.ui
494 if self.keepbranchesf:
485 if self.keepbranchesf:
495 # insert _savebranch at the start of extrafns so if
486 # insert _savebranch at the start of extrafns so if
496 # there's a user-provided extrafn it can clobber branch if
487 # there's a user-provided extrafn it can clobber branch if
497 # desired
488 # desired
498 self.extrafns.insert(0, _savebranch)
489 self.extrafns.insert(0, _savebranch)
499 if self.collapsef:
490 if self.collapsef:
500 branches = set()
491 branches = set()
501 for rev in self.state:
492 for rev in self.state:
502 branches.add(repo[rev].branch())
493 branches.add(repo[rev].branch())
503 if len(branches) > 1:
494 if len(branches) > 1:
504 raise error.InputError(
495 raise error.InputError(
505 _(b'cannot collapse multiple named branches')
496 _(b'cannot collapse multiple named branches')
506 )
497 )
507
498
508 # Keep track of the active bookmarks in order to reset them later
499 # Keep track of the active bookmarks in order to reset them later
509 self.activebookmark = self.activebookmark or repo._activebookmark
500 self.activebookmark = self.activebookmark or repo._activebookmark
510 if self.activebookmark:
501 if self.activebookmark:
511 bookmarks.deactivate(repo)
502 bookmarks.deactivate(repo)
512
503
513 # Store the state before we begin so users can run 'hg rebase --abort'
504 # Store the state before we begin so users can run 'hg rebase --abort'
514 # if we fail before the transaction closes.
505 # if we fail before the transaction closes.
515 self.storestatus()
506 self.storestatus()
516 if tr:
507 if tr:
517 # When using single transaction, store state when transaction
508 # When using single transaction, store state when transaction
518 # commits.
509 # commits.
519 self.storestatus(tr)
510 self.storestatus(tr)
520
511
521 cands = [k for k, v in self.state.items() if v == revtodo]
512 cands = [k for k, v in self.state.items() if v == revtodo]
522 p = repo.ui.makeprogress(
513 p = repo.ui.makeprogress(
523 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
514 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
524 )
515 )
525
516
526 def progress(ctx):
517 def progress(ctx):
527 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
518 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
528
519
529 for subset in sortsource(self.destmap):
520 for subset in sortsource(self.destmap):
530 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
521 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
531 for rev in sortedrevs:
522 for rev in sortedrevs:
532 self._rebasenode(tr, rev, progress)
523 self._rebasenode(tr, rev, progress)
533 p.complete()
524 p.complete()
534 ui.note(_(b'rebase merging completed\n'))
525 ui.note(_(b'rebase merging completed\n'))
535
526
536 def _concludenode(self, rev, editor, commitmsg=None):
527 def _concludenode(self, rev, editor, commitmsg=None):
537 """Commit the wd changes with parents p1 and p2.
528 """Commit the wd changes with parents p1 and p2.
538
529
539 Reuse commit info from rev but also store useful information in extra.
530 Reuse commit info from rev but also store useful information in extra.
540 Return node of committed revision."""
531 Return node of committed revision."""
541 repo = self.repo
532 repo = self.repo
542 ctx = repo[rev]
533 ctx = repo[rev]
543 if commitmsg is None:
534 if commitmsg is None:
544 commitmsg = ctx.description()
535 commitmsg = ctx.description()
545
536
546 # Skip replacement if collapsing, as that degenerates to p1 for all
537 # Skip replacement if collapsing, as that degenerates to p1 for all
547 # nodes.
538 # nodes.
548 if not self.collapsef:
539 if not self.collapsef:
549 cl = repo.changelog
540 cl = repo.changelog
550 commitmsg = rewriteutil.update_hash_refs(
541 commitmsg = rewriteutil.update_hash_refs(
551 repo,
542 repo,
552 commitmsg,
543 commitmsg,
553 {
544 {
554 cl.node(oldrev): [cl.node(newrev)]
545 cl.node(oldrev): [cl.node(newrev)]
555 for oldrev, newrev in self.state.items()
546 for oldrev, newrev in self.state.items()
556 if newrev != revtodo
547 if newrev != revtodo
557 },
548 },
558 )
549 )
559
550
560 date = self.date
551 date = self.date
561 if date is None:
552 if date is None:
562 date = ctx.date()
553 date = ctx.date()
563 extra = {}
554 extra = {}
564 if repo.ui.configbool(b'rebase', b'store-source'):
555 if repo.ui.configbool(b'rebase', b'store-source'):
565 extra = {b'rebase_source': ctx.hex()}
556 extra = {b'rebase_source': ctx.hex()}
566 for c in self.extrafns:
557 for c in self.extrafns:
567 c(ctx, extra)
558 c(ctx, extra)
568 destphase = max(ctx.phase(), phases.draft)
559 destphase = max(ctx.phase(), phases.draft)
569 overrides = {
560 overrides = {
570 (b'phases', b'new-commit'): destphase,
561 (b'phases', b'new-commit'): destphase,
571 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
562 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
572 }
563 }
573 with repo.ui.configoverride(overrides, b'rebase'):
564 with repo.ui.configoverride(overrides, b'rebase'):
574 if self.inmemory:
565 if self.inmemory:
575 newnode = commitmemorynode(
566 newnode = commitmemorynode(
576 repo,
567 repo,
577 wctx=self.wctx,
568 wctx=self.wctx,
578 extra=extra,
569 extra=extra,
579 commitmsg=commitmsg,
570 commitmsg=commitmsg,
580 editor=editor,
571 editor=editor,
581 user=ctx.user(),
572 user=ctx.user(),
582 date=date,
573 date=date,
583 )
574 )
584 else:
575 else:
585 newnode = commitnode(
576 newnode = commitnode(
586 repo,
577 repo,
587 extra=extra,
578 extra=extra,
588 commitmsg=commitmsg,
579 commitmsg=commitmsg,
589 editor=editor,
580 editor=editor,
590 user=ctx.user(),
581 user=ctx.user(),
591 date=date,
582 date=date,
592 )
583 )
593
584
594 return newnode
585 return newnode
595
586
596 def _rebasenode(self, tr, rev, progressfn):
587 def _rebasenode(self, tr, rev, progressfn):
597 repo, ui, opts = self.repo, self.ui, self.opts
588 repo, ui, opts = self.repo, self.ui, self.opts
598 ctx = repo[rev]
589 ctx = repo[rev]
599 desc = _ctxdesc(ctx)
590 desc = _ctxdesc(ctx)
600 if self.state[rev] == rev:
591 if self.state[rev] == rev:
601 ui.status(_(b'already rebased %s\n') % desc)
592 ui.status(_(b'already rebased %s\n') % desc)
602 elif rev in self.obsolete_with_successor_in_rebase_set:
593 elif rev in self.obsolete_with_successor_in_rebase_set:
603 msg = (
594 msg = (
604 _(
595 _(
605 b'note: not rebasing %s and its descendants as '
596 b'note: not rebasing %s and its descendants as '
606 b'this would cause divergence\n'
597 b'this would cause divergence\n'
607 )
598 )
608 % desc
599 % desc
609 )
600 )
610 repo.ui.status(msg)
601 repo.ui.status(msg)
611 self.skipped.add(rev)
602 self.skipped.add(rev)
612 elif rev in self.obsolete_with_successor_in_destination:
603 elif rev in self.obsolete_with_successor_in_destination:
613 succ = self.obsolete_with_successor_in_destination[rev]
604 succ = self.obsolete_with_successor_in_destination[rev]
614 if succ is None:
605 if succ is None:
615 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
606 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
616 else:
607 else:
617 succdesc = _ctxdesc(repo[succ])
608 succdesc = _ctxdesc(repo[succ])
618 msg = _(
609 msg = _(
619 b'note: not rebasing %s, already in destination as %s\n'
610 b'note: not rebasing %s, already in destination as %s\n'
620 ) % (desc, succdesc)
611 ) % (desc, succdesc)
621 repo.ui.status(msg)
612 repo.ui.status(msg)
622 # Make clearrebased aware state[rev] is not a true successor
613 # Make clearrebased aware state[rev] is not a true successor
623 self.skipped.add(rev)
614 self.skipped.add(rev)
624 # Record rev as moved to its desired destination in self.state.
615 # Record rev as moved to its desired destination in self.state.
625 # This helps bookmark and working parent movement.
616 # This helps bookmark and working parent movement.
626 dest = max(
617 dest = max(
627 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
618 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
628 )
619 )
629 self.state[rev] = dest
620 self.state[rev] = dest
630 elif self.state[rev] == revtodo:
621 elif self.state[rev] == revtodo:
631 ui.status(_(b'rebasing %s\n') % desc)
622 ui.status(_(b'rebasing %s\n') % desc)
632 progressfn(ctx)
623 progressfn(ctx)
633 p1, p2, base = defineparents(
624 p1, p2, base = defineparents(
634 repo,
625 repo,
635 rev,
626 rev,
636 self.destmap,
627 self.destmap,
637 self.state,
628 self.state,
638 self.skipped,
629 self.skipped,
639 self.obsolete_with_successor_in_destination,
630 self.obsolete_with_successor_in_destination,
640 )
631 )
641 if self.resume and self.wctx.p1().rev() == p1:
632 if self.resume and self.wctx.p1().rev() == p1:
642 repo.ui.debug(b'resuming interrupted rebase\n')
633 repo.ui.debug(b'resuming interrupted rebase\n')
643 self.resume = False
634 self.resume = False
644 else:
635 else:
645 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
636 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
646 with ui.configoverride(overrides, b'rebase'):
637 with ui.configoverride(overrides, b'rebase'):
647 try:
638 try:
648 rebasenode(
639 rebasenode(
649 repo,
640 repo,
650 rev,
641 rev,
651 p1,
642 p1,
652 p2,
643 p2,
653 base,
644 base,
654 self.collapsef,
645 self.collapsef,
655 wctx=self.wctx,
646 wctx=self.wctx,
656 )
647 )
657 except error.InMemoryMergeConflictsError:
648 except error.InMemoryMergeConflictsError:
658 if self.dryrun:
649 if self.dryrun:
659 raise error.ConflictResolutionRequired(b'rebase')
650 raise error.ConflictResolutionRequired(b'rebase')
660 if self.collapsef:
651 if self.collapsef:
661 # TODO: Make the overlayworkingctx reflected
652 # TODO: Make the overlayworkingctx reflected
662 # in the working copy here instead of re-raising
653 # in the working copy here instead of re-raising
663 # so the entire rebase operation is retried.
654 # so the entire rebase operation is retried.
664 raise
655 raise
665 ui.status(
656 ui.status(
666 _(
657 _(
667 b"hit merge conflicts; rebasing that "
658 b"hit merge conflicts; rebasing that "
668 b"commit again in the working copy\n"
659 b"commit again in the working copy\n"
669 )
660 )
670 )
661 )
671 try:
662 try:
672 cmdutil.bailifchanged(repo)
663 cmdutil.bailifchanged(repo)
673 except error.Abort:
664 except error.Abort:
674 clearstatus(repo)
665 clearstatus(repo)
675 clearcollapsemsg(repo)
666 clearcollapsemsg(repo)
676 raise
667 raise
677 self.inmemory = False
668 self.inmemory = False
678 self._assignworkingcopy()
669 self._assignworkingcopy()
679 mergemod.update(repo[p1], wc=self.wctx)
670 mergemod.update(repo[p1], wc=self.wctx)
680 rebasenode(
671 rebasenode(
681 repo,
672 repo,
682 rev,
673 rev,
683 p1,
674 p1,
684 p2,
675 p2,
685 base,
676 base,
686 self.collapsef,
677 self.collapsef,
687 wctx=self.wctx,
678 wctx=self.wctx,
688 )
679 )
689 if not self.collapsef:
680 if not self.collapsef:
690 merging = p2 != nullrev
681 merging = p2 != nullrev
691 editform = cmdutil.mergeeditform(merging, b'rebase')
682 editform = cmdutil.mergeeditform(merging, b'rebase')
692 editor = cmdutil.getcommiteditor(editform=editform, **opts)
683 editor = cmdutil.getcommiteditor(editform=editform, **opts)
693 # We need to set parents again here just in case we're continuing
684 # We need to set parents again here just in case we're continuing
694 # a rebase started with an old hg version (before 9c9cfecd4600),
685 # a rebase started with an old hg version (before 9c9cfecd4600),
695 # because those old versions would have left us with two dirstate
686 # because those old versions would have left us with two dirstate
696 # parents, and we don't want to create a merge commit here (unless
687 # parents, and we don't want to create a merge commit here (unless
697 # we're rebasing a merge commit).
688 # we're rebasing a merge commit).
698 self.wctx.setparents(repo[p1].node(), repo[p2].node())
689 self.wctx.setparents(repo[p1].node(), repo[p2].node())
699 newnode = self._concludenode(rev, editor)
690 newnode = self._concludenode(rev, editor)
700 else:
691 else:
701 # Skip commit if we are collapsing
692 # Skip commit if we are collapsing
702 newnode = None
693 newnode = None
703 # Update the state
694 # Update the state
704 if newnode is not None:
695 if newnode is not None:
705 self.state[rev] = repo[newnode].rev()
696 self.state[rev] = repo[newnode].rev()
706 ui.debug(b'rebased as %s\n' % short(newnode))
697 ui.debug(b'rebased as %s\n' % short(newnode))
707 if repo[newnode].isempty():
698 if repo[newnode].isempty():
708 ui.warn(
699 ui.warn(
709 _(
700 _(
710 b'note: created empty successor for %s, its '
701 b'note: created empty successor for %s, its '
711 b'destination already has all its changes\n'
702 b'destination already has all its changes\n'
712 )
703 )
713 % desc
704 % desc
714 )
705 )
715 else:
706 else:
716 if not self.collapsef:
707 if not self.collapsef:
717 ui.warn(
708 ui.warn(
718 _(
709 _(
719 b'note: not rebasing %s, its destination already '
710 b'note: not rebasing %s, its destination already '
720 b'has all its changes\n'
711 b'has all its changes\n'
721 )
712 )
722 % desc
713 % desc
723 )
714 )
724 self.skipped.add(rev)
715 self.skipped.add(rev)
725 self.state[rev] = p1
716 self.state[rev] = p1
726 ui.debug(b'next revision set to %d\n' % p1)
717 ui.debug(b'next revision set to %d\n' % p1)
727 else:
718 else:
728 ui.status(
719 ui.status(
729 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
720 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
730 )
721 )
731 if not tr:
722 if not tr:
732 # When not using single transaction, store state after each
723 # When not using single transaction, store state after each
733 # commit is completely done. On InterventionRequired, we thus
724 # commit is completely done. On InterventionRequired, we thus
734 # won't store the status. Instead, we'll hit the "len(parents) == 2"
725 # won't store the status. Instead, we'll hit the "len(parents) == 2"
735 # case and realize that the commit was in progress.
726 # case and realize that the commit was in progress.
736 self.storestatus()
727 self.storestatus()
737
728
738 def _finishrebase(self):
729 def _finishrebase(self):
739 repo, ui, opts = self.repo, self.ui, self.opts
730 repo, ui, opts = self.repo, self.ui, self.opts
740 fm = ui.formatter(b'rebase', pycompat.byteskwargs(opts))
731 fm = ui.formatter(b'rebase', pycompat.byteskwargs(opts))
741 fm.startitem()
732 fm.startitem()
742 if self.collapsef:
733 if self.collapsef:
743 p1, p2, _base = defineparents(
734 p1, p2, _base = defineparents(
744 repo,
735 repo,
745 min(self.state),
736 min(self.state),
746 self.destmap,
737 self.destmap,
747 self.state,
738 self.state,
748 self.skipped,
739 self.skipped,
749 self.obsolete_with_successor_in_destination,
740 self.obsolete_with_successor_in_destination,
750 )
741 )
751 editopt = opts.get('edit')
742 editopt = opts.get('edit')
752 editform = b'rebase.collapse'
743 editform = b'rebase.collapse'
753 if self.collapsemsg:
744 if self.collapsemsg:
754 commitmsg = self.collapsemsg
745 commitmsg = self.collapsemsg
755 else:
746 else:
756 commitmsg = b'Collapsed revision'
747 commitmsg = b'Collapsed revision'
757 for rebased in sorted(self.state):
748 for rebased in sorted(self.state):
758 if rebased not in self.skipped:
749 if rebased not in self.skipped:
759 commitmsg += b'\n* %s' % repo[rebased].description()
750 commitmsg += b'\n* %s' % repo[rebased].description()
760 editopt = True
751 editopt = True
761 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
752 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
762 revtoreuse = max(self.state)
753 revtoreuse = max(self.state)
763
754
764 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
755 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
765 newnode = self._concludenode(
756 newnode = self._concludenode(
766 revtoreuse, editor, commitmsg=commitmsg
757 revtoreuse, editor, commitmsg=commitmsg
767 )
758 )
768
759
769 if newnode is not None:
760 if newnode is not None:
770 newrev = repo[newnode].rev()
761 newrev = repo[newnode].rev()
771 for oldrev in self.state:
762 for oldrev in self.state:
772 self.state[oldrev] = newrev
763 self.state[oldrev] = newrev
773
764
774 if b'qtip' in repo.tags():
765 if b'qtip' in repo.tags():
775 updatemq(repo, self.state, self.skipped, **opts)
766 updatemq(repo, self.state, self.skipped, **opts)
776
767
777 # restore original working directory
768 # restore original working directory
778 # (we do this before stripping)
769 # (we do this before stripping)
779 newwd = self.state.get(self.originalwd, self.originalwd)
770 newwd = self.state.get(self.originalwd, self.originalwd)
780 if newwd < 0:
771 if newwd < 0:
781 # original directory is a parent of rebase set root or ignored
772 # original directory is a parent of rebase set root or ignored
782 newwd = self.originalwd
773 newwd = self.originalwd
783 if newwd not in [c.rev() for c in repo[None].parents()]:
774 if newwd not in [c.rev() for c in repo[None].parents()]:
784 ui.note(_(b"update back to initial working directory parent\n"))
775 ui.note(_(b"update back to initial working directory parent\n"))
785 mergemod.update(repo[newwd])
776 mergemod.update(repo[newwd])
786
777
787 collapsedas = None
778 collapsedas = None
788 if self.collapsef and not self.keepf:
779 if self.collapsef and not self.keepf:
789 collapsedas = newnode
780 collapsedas = newnode
790 clearrebased(
781 clearrebased(
791 ui,
782 ui,
792 repo,
783 repo,
793 self.destmap,
784 self.destmap,
794 self.state,
785 self.state,
795 self.skipped,
786 self.skipped,
796 collapsedas,
787 collapsedas,
797 self.keepf,
788 self.keepf,
798 fm=fm,
789 fm=fm,
799 backup=self.backupf,
790 backup=self.backupf,
800 )
791 )
801
792
802 clearstatus(repo)
793 clearstatus(repo)
803 clearcollapsemsg(repo)
794 clearcollapsemsg(repo)
804
795
805 ui.note(_(b"rebase completed\n"))
796 ui.note(_(b"rebase completed\n"))
806 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
797 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
807 if self.skipped:
798 if self.skipped:
808 skippedlen = len(self.skipped)
799 skippedlen = len(self.skipped)
809 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
800 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
810 fm.end()
801 fm.end()
811
802
812 if (
803 if (
813 self.activebookmark
804 self.activebookmark
814 and self.activebookmark in repo._bookmarks
805 and self.activebookmark in repo._bookmarks
815 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
806 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
816 ):
807 ):
817 bookmarks.activate(repo, self.activebookmark)
808 bookmarks.activate(repo, self.activebookmark)
818
809
819 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
810 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
820 '''Restore the repository to its original state.'''
811 '''Restore the repository to its original state.'''
821
812
822 repo = self.repo
813 repo = self.repo
823 try:
814 try:
824 # If the first commits in the rebased set get skipped during the
815 # If the first commits in the rebased set get skipped during the
825 # rebase, their values within the state mapping will be the dest
816 # rebase, their values within the state mapping will be the dest
826 # rev id. The rebased list must must not contain the dest rev
817 # rev id. The rebased list must must not contain the dest rev
827 # (issue4896)
818 # (issue4896)
828 rebased = [
819 rebased = [
829 s
820 s
830 for r, s in self.state.items()
821 for r, s in self.state.items()
831 if s >= 0 and s != r and s != self.destmap[r]
822 if s >= 0 and s != r and s != self.destmap[r]
832 ]
823 ]
833 immutable = [d for d in rebased if not repo[d].mutable()]
824 immutable = [d for d in rebased if not repo[d].mutable()]
834 cleanup = True
825 cleanup = True
835 if immutable:
826 if immutable:
836 repo.ui.warn(
827 repo.ui.warn(
837 _(b"warning: can't clean up public changesets %s\n")
828 _(b"warning: can't clean up public changesets %s\n")
838 % b', '.join(bytes(repo[r]) for r in immutable),
829 % b', '.join(bytes(repo[r]) for r in immutable),
839 hint=_(b"see 'hg help phases' for details"),
830 hint=_(b"see 'hg help phases' for details"),
840 )
831 )
841 cleanup = False
832 cleanup = False
842
833
843 descendants = set()
834 descendants = set()
844 if rebased:
835 if rebased:
845 descendants = set(repo.changelog.descendants(rebased))
836 descendants = set(repo.changelog.descendants(rebased))
846 if descendants - set(rebased):
837 if descendants - set(rebased):
847 repo.ui.warn(
838 repo.ui.warn(
848 _(
839 _(
849 b"warning: new changesets detected on "
840 b"warning: new changesets detected on "
850 b"destination branch, can't strip\n"
841 b"destination branch, can't strip\n"
851 )
842 )
852 )
843 )
853 cleanup = False
844 cleanup = False
854
845
855 if cleanup:
846 if cleanup:
856 if rebased:
847 if rebased:
857 strippoints = [
848 strippoints = [
858 c.node() for c in repo.set(b'roots(%ld)', rebased)
849 c.node() for c in repo.set(b'roots(%ld)', rebased)
859 ]
850 ]
860
851
861 updateifonnodes = set(rebased)
852 updateifonnodes = set(rebased)
862 updateifonnodes.update(self.destmap.values())
853 updateifonnodes.update(self.destmap.values())
863
854
864 if not dryrun and not confirm:
855 if not dryrun and not confirm:
865 updateifonnodes.add(self.originalwd)
856 updateifonnodes.add(self.originalwd)
866
857
867 shouldupdate = repo[b'.'].rev() in updateifonnodes
858 shouldupdate = repo[b'.'].rev() in updateifonnodes
868
859
869 # Update away from the rebase if necessary
860 # Update away from the rebase if necessary
870 if shouldupdate:
861 if shouldupdate:
871 mergemod.clean_update(repo[self.originalwd])
862 mergemod.clean_update(repo[self.originalwd])
872
863
873 # Strip from the first rebased revision
864 # Strip from the first rebased revision
874 if rebased:
865 if rebased:
875 repair.strip(repo.ui, repo, strippoints, backup=backup)
866 repair.strip(repo.ui, repo, strippoints, backup=backup)
876
867
877 if self.activebookmark and self.activebookmark in repo._bookmarks:
868 if self.activebookmark and self.activebookmark in repo._bookmarks:
878 bookmarks.activate(repo, self.activebookmark)
869 bookmarks.activate(repo, self.activebookmark)
879
870
880 finally:
871 finally:
881 clearstatus(repo)
872 clearstatus(repo)
882 clearcollapsemsg(repo)
873 clearcollapsemsg(repo)
883 if not suppwarns:
874 if not suppwarns:
884 repo.ui.warn(_(b'rebase aborted\n'))
875 repo.ui.warn(_(b'rebase aborted\n'))
885 return 0
876 return 0
886
877
887
878
888 @command(
879 @command(
889 b'rebase',
880 b'rebase',
890 [
881 [
891 (
882 (
892 b's',
883 b's',
893 b'source',
884 b'source',
894 [],
885 [],
895 _(b'rebase the specified changesets and their descendants'),
886 _(b'rebase the specified changesets and their descendants'),
896 _(b'REV'),
887 _(b'REV'),
897 ),
888 ),
898 (
889 (
899 b'b',
890 b'b',
900 b'base',
891 b'base',
901 [],
892 [],
902 _(b'rebase everything from branching point of specified changeset'),
893 _(b'rebase everything from branching point of specified changeset'),
903 _(b'REV'),
894 _(b'REV'),
904 ),
895 ),
905 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
896 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
906 (
897 (
907 b'd',
898 b'd',
908 b'dest',
899 b'dest',
909 b'',
900 b'',
910 _(b'rebase onto the specified changeset'),
901 _(b'rebase onto the specified changeset'),
911 _(b'REV'),
902 _(b'REV'),
912 ),
903 ),
913 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
904 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
914 (
905 (
915 b'm',
906 b'm',
916 b'message',
907 b'message',
917 b'',
908 b'',
918 _(b'use text as collapse commit message'),
909 _(b'use text as collapse commit message'),
919 _(b'TEXT'),
910 _(b'TEXT'),
920 ),
911 ),
921 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
912 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
922 (
913 (
923 b'l',
914 b'l',
924 b'logfile',
915 b'logfile',
925 b'',
916 b'',
926 _(b'read collapse commit message from file'),
917 _(b'read collapse commit message from file'),
927 _(b'FILE'),
918 _(b'FILE'),
928 ),
919 ),
929 (b'k', b'keep', False, _(b'keep original changesets')),
920 (b'k', b'keep', False, _(b'keep original changesets')),
930 (b'', b'keepbranches', False, _(b'keep original branch names')),
921 (b'', b'keepbranches', False, _(b'keep original branch names')),
931 (b'D', b'detach', False, _(b'(DEPRECATED)')),
922 (b'D', b'detach', False, _(b'(DEPRECATED)')),
932 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
923 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
933 (b't', b'tool', b'', _(b'specify merge tool')),
924 (b't', b'tool', b'', _(b'specify merge tool')),
934 (b'', b'stop', False, _(b'stop interrupted rebase')),
925 (b'', b'stop', False, _(b'stop interrupted rebase')),
935 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
926 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
936 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
927 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
937 (
928 (
938 b'',
929 b'',
939 b'auto-orphans',
930 b'auto-orphans',
940 b'',
931 b'',
941 _(
932 _(
942 b'automatically rebase orphan revisions '
933 b'automatically rebase orphan revisions '
943 b'in the specified revset (EXPERIMENTAL)'
934 b'in the specified revset (EXPERIMENTAL)'
944 ),
935 ),
945 ),
936 ),
946 ]
937 ]
947 + cmdutil.dryrunopts
938 + cmdutil.dryrunopts
948 + cmdutil.formatteropts
939 + cmdutil.formatteropts
949 + cmdutil.confirmopts,
940 + cmdutil.confirmopts,
950 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
941 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
951 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
942 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
952 )
943 )
953 def rebase(ui, repo, **opts):
944 def rebase(ui, repo, **opts):
954 """move changeset (and descendants) to a different branch
945 """move changeset (and descendants) to a different branch
955
946
956 Rebase uses repeated merging to graft changesets from one part of
947 Rebase uses repeated merging to graft changesets from one part of
957 history (the source) onto another (the destination). This can be
948 history (the source) onto another (the destination). This can be
958 useful for linearizing *local* changes relative to a master
949 useful for linearizing *local* changes relative to a master
959 development tree.
950 development tree.
960
951
961 Published commits cannot be rebased (see :hg:`help phases`).
952 Published commits cannot be rebased (see :hg:`help phases`).
962 To copy commits, see :hg:`help graft`.
953 To copy commits, see :hg:`help graft`.
963
954
964 If you don't specify a destination changeset (``-d/--dest``), rebase
955 If you don't specify a destination changeset (``-d/--dest``), rebase
965 will use the same logic as :hg:`merge` to pick a destination. if
956 will use the same logic as :hg:`merge` to pick a destination. if
966 the current branch contains exactly one other head, the other head
957 the current branch contains exactly one other head, the other head
967 is merged with by default. Otherwise, an explicit revision with
958 is merged with by default. Otherwise, an explicit revision with
968 which to merge with must be provided. (destination changeset is not
959 which to merge with must be provided. (destination changeset is not
969 modified by rebasing, but new changesets are added as its
960 modified by rebasing, but new changesets are added as its
970 descendants.)
961 descendants.)
971
962
972 Here are the ways to select changesets:
963 Here are the ways to select changesets:
973
964
974 1. Explicitly select them using ``--rev``.
965 1. Explicitly select them using ``--rev``.
975
966
976 2. Use ``--source`` to select a root changeset and include all of its
967 2. Use ``--source`` to select a root changeset and include all of its
977 descendants.
968 descendants.
978
969
979 3. Use ``--base`` to select a changeset; rebase will find ancestors
970 3. Use ``--base`` to select a changeset; rebase will find ancestors
980 and their descendants which are not also ancestors of the destination.
971 and their descendants which are not also ancestors of the destination.
981
972
982 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
973 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
983 rebase will use ``--base .`` as above.
974 rebase will use ``--base .`` as above.
984
975
985 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
976 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
986 can be used in ``--dest``. Destination would be calculated per source
977 can be used in ``--dest``. Destination would be calculated per source
987 revision with ``SRC`` substituted by that single source revision and
978 revision with ``SRC`` substituted by that single source revision and
988 ``ALLSRC`` substituted by all source revisions.
979 ``ALLSRC`` substituted by all source revisions.
989
980
990 Rebase will destroy original changesets unless you use ``--keep``.
981 Rebase will destroy original changesets unless you use ``--keep``.
991 It will also move your bookmarks (even if you do).
982 It will also move your bookmarks (even if you do).
992
983
993 Some changesets may be dropped if they do not contribute changes
984 Some changesets may be dropped if they do not contribute changes
994 (e.g. merges from the destination branch).
985 (e.g. merges from the destination branch).
995
986
996 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
987 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
997 a named branch with two heads. You will need to explicitly specify source
988 a named branch with two heads. You will need to explicitly specify source
998 and/or destination.
989 and/or destination.
999
990
1000 If you need to use a tool to automate merge/conflict decisions, you
991 If you need to use a tool to automate merge/conflict decisions, you
1001 can specify one with ``--tool``, see :hg:`help merge-tools`.
992 can specify one with ``--tool``, see :hg:`help merge-tools`.
1002 As a caveat: the tool will not be used to mediate when a file was
993 As a caveat: the tool will not be used to mediate when a file was
1003 deleted, there is no hook presently available for this.
994 deleted, there is no hook presently available for this.
1004
995
1005 If a rebase is interrupted to manually resolve a conflict, it can be
996 If a rebase is interrupted to manually resolve a conflict, it can be
1006 continued with --continue/-c, aborted with --abort/-a, or stopped with
997 continued with --continue/-c, aborted with --abort/-a, or stopped with
1007 --stop.
998 --stop.
1008
999
1009 .. container:: verbose
1000 .. container:: verbose
1010
1001
1011 Examples:
1002 Examples:
1012
1003
1013 - move "local changes" (current commit back to branching point)
1004 - move "local changes" (current commit back to branching point)
1014 to the current branch tip after a pull::
1005 to the current branch tip after a pull::
1015
1006
1016 hg rebase
1007 hg rebase
1017
1008
1018 - move a single changeset to the stable branch::
1009 - move a single changeset to the stable branch::
1019
1010
1020 hg rebase -r 5f493448 -d stable
1011 hg rebase -r 5f493448 -d stable
1021
1012
1022 - splice a commit and all its descendants onto another part of history::
1013 - splice a commit and all its descendants onto another part of history::
1023
1014
1024 hg rebase --source c0c3 --dest 4cf9
1015 hg rebase --source c0c3 --dest 4cf9
1025
1016
1026 - rebase everything on a branch marked by a bookmark onto the
1017 - rebase everything on a branch marked by a bookmark onto the
1027 default branch::
1018 default branch::
1028
1019
1029 hg rebase --base myfeature --dest default
1020 hg rebase --base myfeature --dest default
1030
1021
1031 - collapse a sequence of changes into a single commit::
1022 - collapse a sequence of changes into a single commit::
1032
1023
1033 hg rebase --collapse -r 1520:1525 -d .
1024 hg rebase --collapse -r 1520:1525 -d .
1034
1025
1035 - move a named branch while preserving its name::
1026 - move a named branch while preserving its name::
1036
1027
1037 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1028 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1038
1029
1039 - stabilize orphaned changesets so history looks linear::
1030 - stabilize orphaned changesets so history looks linear::
1040
1031
1041 hg rebase -r 'orphan()-obsolete()'\
1032 hg rebase -r 'orphan()-obsolete()'\
1042 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1033 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1043 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1034 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1044
1035
1045 Configuration Options:
1036 Configuration Options:
1046
1037
1047 You can make rebase require a destination if you set the following config
1038 You can make rebase require a destination if you set the following config
1048 option::
1039 option::
1049
1040
1050 [commands]
1041 [commands]
1051 rebase.requiredest = True
1042 rebase.requiredest = True
1052
1043
1053 By default, rebase will close the transaction after each commit. For
1044 By default, rebase will close the transaction after each commit. For
1054 performance purposes, you can configure rebase to use a single transaction
1045 performance purposes, you can configure rebase to use a single transaction
1055 across the entire rebase. WARNING: This setting introduces a significant
1046 across the entire rebase. WARNING: This setting introduces a significant
1056 risk of losing the work you've done in a rebase if the rebase aborts
1047 risk of losing the work you've done in a rebase if the rebase aborts
1057 unexpectedly::
1048 unexpectedly::
1058
1049
1059 [rebase]
1050 [rebase]
1060 singletransaction = True
1051 singletransaction = True
1061
1052
1062 By default, rebase writes to the working copy, but you can configure it to
1053 By default, rebase writes to the working copy, but you can configure it to
1063 run in-memory for better performance. When the rebase is not moving the
1054 run in-memory for better performance. When the rebase is not moving the
1064 parent(s) of the working copy (AKA the "currently checked out changesets"),
1055 parent(s) of the working copy (AKA the "currently checked out changesets"),
1065 this may also allow it to run even if the working copy is dirty::
1056 this may also allow it to run even if the working copy is dirty::
1066
1057
1067 [rebase]
1058 [rebase]
1068 experimental.inmemory = True
1059 experimental.inmemory = True
1069
1060
1070 Return Values:
1061 Return Values:
1071
1062
1072 Returns 0 on success, 1 if nothing to rebase or there are
1063 Returns 0 on success, 1 if nothing to rebase or there are
1073 unresolved conflicts.
1064 unresolved conflicts.
1074
1065
1075 """
1066 """
1076 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1067 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1077 action = cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')
1068 action = cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')
1078 if action:
1069 if action:
1079 cmdutil.check_incompatible_arguments(
1070 cmdutil.check_incompatible_arguments(
1080 opts, action, ['confirm', 'dry_run']
1071 opts, action, ['confirm', 'dry_run']
1081 )
1072 )
1082 cmdutil.check_incompatible_arguments(
1073 cmdutil.check_incompatible_arguments(
1083 opts, action, ['rev', 'source', 'base', 'dest']
1074 opts, action, ['rev', 'source', 'base', 'dest']
1084 )
1075 )
1085 cmdutil.check_at_most_one_arg(opts, 'confirm', 'dry_run')
1076 cmdutil.check_at_most_one_arg(opts, 'confirm', 'dry_run')
1086 cmdutil.check_at_most_one_arg(opts, 'rev', 'source', 'base')
1077 cmdutil.check_at_most_one_arg(opts, 'rev', 'source', 'base')
1087
1078
1088 if action or repo.currenttransaction() is not None:
1079 if action or repo.currenttransaction() is not None:
1089 # in-memory rebase is not compatible with resuming rebases.
1080 # in-memory rebase is not compatible with resuming rebases.
1090 # (Or if it is run within a transaction, since the restart logic can
1081 # (Or if it is run within a transaction, since the restart logic can
1091 # fail the entire transaction.)
1082 # fail the entire transaction.)
1092 inmemory = False
1083 inmemory = False
1093
1084
1094 if opts.get('auto_orphans'):
1085 if opts.get('auto_orphans'):
1095 disallowed_opts = set(opts) - {'auto_orphans'}
1086 disallowed_opts = set(opts) - {'auto_orphans'}
1096 cmdutil.check_incompatible_arguments(
1087 cmdutil.check_incompatible_arguments(
1097 opts, 'auto_orphans', disallowed_opts
1088 opts, 'auto_orphans', disallowed_opts
1098 )
1089 )
1099
1090
1100 userrevs = list(repo.revs(opts.get('auto_orphans')))
1091 userrevs = list(repo.revs(opts.get('auto_orphans')))
1101 opts['rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1092 opts['rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1102 opts['dest'] = b'_destautoorphanrebase(SRC)'
1093 opts['dest'] = b'_destautoorphanrebase(SRC)'
1103
1094
1104 if opts.get('dry_run') or opts.get('confirm'):
1095 if opts.get('dry_run') or opts.get('confirm'):
1105 return _dryrunrebase(ui, repo, action, opts)
1096 return _dryrunrebase(ui, repo, action, opts)
1106 elif action == 'stop':
1097 elif action == 'stop':
1107 rbsrt = rebaseruntime(repo, ui)
1098 rbsrt = rebaseruntime(repo, ui)
1108 with repo.wlock(), repo.lock():
1099 with repo.wlock(), repo.lock():
1109 rbsrt.restorestatus()
1100 rbsrt.restorestatus()
1110 if rbsrt.collapsef:
1101 if rbsrt.collapsef:
1111 raise error.StateError(_(b"cannot stop in --collapse session"))
1102 raise error.StateError(_(b"cannot stop in --collapse session"))
1112 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1103 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1113 if not (rbsrt.keepf or allowunstable):
1104 if not (rbsrt.keepf or allowunstable):
1114 raise error.StateError(
1105 raise error.StateError(
1115 _(
1106 _(
1116 b"cannot remove original changesets with"
1107 b"cannot remove original changesets with"
1117 b" unrebased descendants"
1108 b" unrebased descendants"
1118 ),
1109 ),
1119 hint=_(
1110 hint=_(
1120 b'either enable obsmarkers to allow unstable '
1111 b'either enable obsmarkers to allow unstable '
1121 b'revisions or use --keep to keep original '
1112 b'revisions or use --keep to keep original '
1122 b'changesets'
1113 b'changesets'
1123 ),
1114 ),
1124 )
1115 )
1125 # update to the current working revision
1116 # update to the current working revision
1126 # to clear interrupted merge
1117 # to clear interrupted merge
1127 mergemod.clean_update(repo[rbsrt.originalwd])
1118 mergemod.clean_update(repo[rbsrt.originalwd])
1128 rbsrt._finishrebase()
1119 rbsrt._finishrebase()
1129 return 0
1120 return 0
1130 elif inmemory:
1121 elif inmemory:
1131 try:
1122 try:
1132 # in-memory merge doesn't support conflicts, so if we hit any, abort
1123 # in-memory merge doesn't support conflicts, so if we hit any, abort
1133 # and re-run as an on-disk merge.
1124 # and re-run as an on-disk merge.
1134 overrides = {(b'rebase', b'singletransaction'): True}
1125 overrides = {(b'rebase', b'singletransaction'): True}
1135 with ui.configoverride(overrides, b'rebase'):
1126 with ui.configoverride(overrides, b'rebase'):
1136 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1127 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1137 except error.InMemoryMergeConflictsError:
1128 except error.InMemoryMergeConflictsError:
1138 if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
1129 if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
1139 raise
1130 raise
1140 ui.warn(
1131 ui.warn(
1141 _(
1132 _(
1142 b'hit merge conflicts; re-running rebase without in-memory'
1133 b'hit merge conflicts; re-running rebase without in-memory'
1143 b' merge\n'
1134 b' merge\n'
1144 )
1135 )
1145 )
1136 )
1146 clearstatus(repo)
1137 clearstatus(repo)
1147 clearcollapsemsg(repo)
1138 clearcollapsemsg(repo)
1148 return _dorebase(ui, repo, action, opts, inmemory=False)
1139 return _dorebase(ui, repo, action, opts, inmemory=False)
1149 else:
1140 else:
1150 return _dorebase(ui, repo, action, opts)
1141 return _dorebase(ui, repo, action, opts)
1151
1142
1152
1143
1153 def _dryrunrebase(ui, repo, action, opts):
1144 def _dryrunrebase(ui, repo, action, opts):
1154 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1145 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1155 confirm = opts.get('confirm')
1146 confirm = opts.get('confirm')
1156 if confirm:
1147 if confirm:
1157 ui.status(_(b'starting in-memory rebase\n'))
1148 ui.status(_(b'starting in-memory rebase\n'))
1158 else:
1149 else:
1159 ui.status(
1150 ui.status(
1160 _(b'starting dry-run rebase; repository will not be changed\n')
1151 _(b'starting dry-run rebase; repository will not be changed\n')
1161 )
1152 )
1162 with repo.wlock(), repo.lock():
1153 with repo.wlock(), repo.lock():
1163 needsabort = True
1154 needsabort = True
1164 try:
1155 try:
1165 overrides = {(b'rebase', b'singletransaction'): True}
1156 overrides = {(b'rebase', b'singletransaction'): True}
1166 with ui.configoverride(overrides, b'rebase'):
1157 with ui.configoverride(overrides, b'rebase'):
1167 res = _origrebase(
1158 res = _origrebase(
1168 ui,
1159 ui,
1169 repo,
1160 repo,
1170 action,
1161 action,
1171 opts,
1162 opts,
1172 rbsrt,
1163 rbsrt,
1173 )
1164 )
1174 if res == _nothingtorebase():
1165 if res == _nothingtorebase():
1175 needsabort = False
1166 needsabort = False
1176 return res
1167 return res
1177 except error.ConflictResolutionRequired:
1168 except error.ConflictResolutionRequired:
1178 ui.status(_(b'hit a merge conflict\n'))
1169 ui.status(_(b'hit a merge conflict\n'))
1179 return 1
1170 return 1
1180 except error.Abort:
1171 except error.Abort:
1181 needsabort = False
1172 needsabort = False
1182 raise
1173 raise
1183 else:
1174 else:
1184 if confirm:
1175 if confirm:
1185 ui.status(_(b'rebase completed successfully\n'))
1176 ui.status(_(b'rebase completed successfully\n'))
1186 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1177 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1187 # finish unfinished rebase
1178 # finish unfinished rebase
1188 rbsrt._finishrebase()
1179 rbsrt._finishrebase()
1189 else:
1180 else:
1190 rbsrt._prepareabortorcontinue(
1181 rbsrt._prepareabortorcontinue(
1191 isabort=True,
1182 isabort=True,
1192 backup=False,
1183 backup=False,
1193 suppwarns=True,
1184 suppwarns=True,
1194 confirm=confirm,
1185 confirm=confirm,
1195 )
1186 )
1196 needsabort = False
1187 needsabort = False
1197 else:
1188 else:
1198 ui.status(
1189 ui.status(
1199 _(
1190 _(
1200 b'dry-run rebase completed successfully; run without'
1191 b'dry-run rebase completed successfully; run without'
1201 b' -n/--dry-run to perform this rebase\n'
1192 b' -n/--dry-run to perform this rebase\n'
1202 )
1193 )
1203 )
1194 )
1204 return 0
1195 return 0
1205 finally:
1196 finally:
1206 if needsabort:
1197 if needsabort:
1207 # no need to store backup in case of dryrun
1198 # no need to store backup in case of dryrun
1208 rbsrt._prepareabortorcontinue(
1199 rbsrt._prepareabortorcontinue(
1209 isabort=True,
1200 isabort=True,
1210 backup=False,
1201 backup=False,
1211 suppwarns=True,
1202 suppwarns=True,
1212 dryrun=opts.get('dry_run'),
1203 dryrun=opts.get('dry_run'),
1213 )
1204 )
1214
1205
1215
1206
1216 def _dorebase(ui, repo, action, opts, inmemory=False):
1207 def _dorebase(ui, repo, action, opts, inmemory=False):
1217 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1208 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1218 return _origrebase(ui, repo, action, opts, rbsrt)
1209 return _origrebase(ui, repo, action, opts, rbsrt)
1219
1210
1220
1211
1221 def _origrebase(ui, repo, action, opts, rbsrt):
1212 def _origrebase(ui, repo, action, opts, rbsrt):
1222 assert action != 'stop'
1213 assert action != 'stop'
1223 with repo.wlock(), repo.lock():
1214 with repo.wlock(), repo.lock():
1224 if opts.get('interactive'):
1215 if opts.get('interactive'):
1225 try:
1216 try:
1226 if extensions.find(b'histedit'):
1217 if extensions.find(b'histedit'):
1227 enablehistedit = b''
1218 enablehistedit = b''
1228 except KeyError:
1219 except KeyError:
1229 enablehistedit = b" --config extensions.histedit="
1220 enablehistedit = b" --config extensions.histedit="
1230 help = b"hg%s help -e histedit" % enablehistedit
1221 help = b"hg%s help -e histedit" % enablehistedit
1231 msg = (
1222 msg = (
1232 _(
1223 _(
1233 b"interactive history editing is supported by the "
1224 b"interactive history editing is supported by the "
1234 b"'histedit' extension (see \"%s\")"
1225 b"'histedit' extension (see \"%s\")"
1235 )
1226 )
1236 % help
1227 % help
1237 )
1228 )
1238 raise error.InputError(msg)
1229 raise error.InputError(msg)
1239
1230
1240 if rbsrt.collapsemsg and not rbsrt.collapsef:
1231 if rbsrt.collapsemsg and not rbsrt.collapsef:
1241 raise error.InputError(
1232 raise error.InputError(
1242 _(b'message can only be specified with collapse')
1233 _(b'message can only be specified with collapse')
1243 )
1234 )
1244
1235
1245 if action:
1236 if action:
1246 if rbsrt.collapsef:
1237 if rbsrt.collapsef:
1247 raise error.InputError(
1238 raise error.InputError(
1248 _(b'cannot use collapse with continue or abort')
1239 _(b'cannot use collapse with continue or abort')
1249 )
1240 )
1250 if action == 'abort' and opts.get('tool', False):
1241 if action == 'abort' and opts.get('tool', False):
1251 ui.warn(_(b'tool option will be ignored\n'))
1242 ui.warn(_(b'tool option will be ignored\n'))
1252 if action == 'continue':
1243 if action == 'continue':
1253 ms = mergestatemod.mergestate.read(repo)
1244 ms = mergestatemod.mergestate.read(repo)
1254 mergeutil.checkunresolved(ms)
1245 mergeutil.checkunresolved(ms)
1255
1246
1256 retcode = rbsrt._prepareabortorcontinue(isabort=(action == 'abort'))
1247 retcode = rbsrt._prepareabortorcontinue(isabort=(action == 'abort'))
1257 if retcode is not None:
1248 if retcode is not None:
1258 return retcode
1249 return retcode
1259 else:
1250 else:
1260 # search default destination in this space
1251 # search default destination in this space
1261 # used in the 'hg pull --rebase' case, see issue 5214.
1252 # used in the 'hg pull --rebase' case, see issue 5214.
1262 destspace = opts.get('_destspace')
1253 destspace = opts.get('_destspace')
1263 destmap = _definedestmap(
1254 destmap = _definedestmap(
1264 ui,
1255 ui,
1265 repo,
1256 repo,
1266 rbsrt.inmemory,
1257 rbsrt.inmemory,
1267 opts.get('dest', None),
1258 opts.get('dest', None),
1268 opts.get('source', []),
1259 opts.get('source', []),
1269 opts.get('base', []),
1260 opts.get('base', []),
1270 opts.get('rev', []),
1261 opts.get('rev', []),
1271 destspace=destspace,
1262 destspace=destspace,
1272 )
1263 )
1273 retcode = rbsrt._preparenewrebase(destmap)
1264 retcode = rbsrt._preparenewrebase(destmap)
1274 if retcode is not None:
1265 if retcode is not None:
1275 return retcode
1266 return retcode
1276 storecollapsemsg(repo, rbsrt.collapsemsg)
1267 storecollapsemsg(repo, rbsrt.collapsemsg)
1277
1268
1278 tr = None
1269 tr = None
1279
1270
1280 singletr = ui.configbool(b'rebase', b'singletransaction')
1271 singletr = ui.configbool(b'rebase', b'singletransaction')
1281 if singletr:
1272 if singletr:
1282 tr = repo.transaction(b'rebase')
1273 tr = repo.transaction(b'rebase')
1283
1274
1284 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1275 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1285 # one transaction here. Otherwise, transactions are obtained when
1276 # one transaction here. Otherwise, transactions are obtained when
1286 # committing each node, which is slower but allows partial success.
1277 # committing each node, which is slower but allows partial success.
1287 with util.acceptintervention(tr):
1278 with util.acceptintervention(tr):
1288 rbsrt._performrebase(tr)
1279 rbsrt._performrebase(tr)
1289 if not rbsrt.dryrun:
1280 if not rbsrt.dryrun:
1290 rbsrt._finishrebase()
1281 rbsrt._finishrebase()
1291
1282
1292
1283
1293 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1284 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1294 """use revisions argument to define destmap {srcrev: destrev}"""
1285 """use revisions argument to define destmap {srcrev: destrev}"""
1295 if revf is None:
1286 if revf is None:
1296 revf = []
1287 revf = []
1297
1288
1298 # destspace is here to work around issues with `hg pull --rebase` see
1289 # destspace is here to work around issues with `hg pull --rebase` see
1299 # issue5214 for details
1290 # issue5214 for details
1300
1291
1301 cmdutil.checkunfinished(repo)
1292 cmdutil.checkunfinished(repo)
1302 if not inmemory:
1293 if not inmemory:
1303 cmdutil.bailifchanged(repo)
1294 cmdutil.bailifchanged(repo)
1304
1295
1305 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1296 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1306 raise error.InputError(
1297 raise error.InputError(
1307 _(b'you must specify a destination'),
1298 _(b'you must specify a destination'),
1308 hint=_(b'use: hg rebase -d REV'),
1299 hint=_(b'use: hg rebase -d REV'),
1309 )
1300 )
1310
1301
1311 dest = None
1302 dest = None
1312
1303
1313 if revf:
1304 if revf:
1314 rebaseset = logcmdutil.revrange(repo, revf)
1305 rebaseset = logcmdutil.revrange(repo, revf)
1315 if not rebaseset:
1306 if not rebaseset:
1316 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1307 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1317 return None
1308 return None
1318 elif srcf:
1309 elif srcf:
1319 src = logcmdutil.revrange(repo, srcf)
1310 src = logcmdutil.revrange(repo, srcf)
1320 if not src:
1311 if not src:
1321 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1312 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1322 return None
1313 return None
1323 # `+ (%ld)` to work around `wdir()::` being empty
1314 # `+ (%ld)` to work around `wdir()::` being empty
1324 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1315 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1325 else:
1316 else:
1326 base = logcmdutil.revrange(repo, basef or [b'.'])
1317 base = logcmdutil.revrange(repo, basef or [b'.'])
1327 if not base:
1318 if not base:
1328 ui.status(
1319 ui.status(
1329 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1320 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1330 )
1321 )
1331 return None
1322 return None
1332 if destf:
1323 if destf:
1333 # --base does not support multiple destinations
1324 # --base does not support multiple destinations
1334 dest = logcmdutil.revsingle(repo, destf)
1325 dest = logcmdutil.revsingle(repo, destf)
1335 else:
1326 else:
1336 dest = repo[_destrebase(repo, base, destspace=destspace)]
1327 dest = repo[_destrebase(repo, base, destspace=destspace)]
1337 destf = bytes(dest)
1328 destf = bytes(dest)
1338
1329
1339 roots = [] # selected children of branching points
1330 roots = [] # selected children of branching points
1340 bpbase = {} # {branchingpoint: [origbase]}
1331 bpbase = {} # {branchingpoint: [origbase]}
1341 for b in base: # group bases by branching points
1332 for b in base: # group bases by branching points
1342 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1333 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1343 bpbase[bp] = bpbase.get(bp, []) + [b]
1334 bpbase[bp] = bpbase.get(bp, []) + [b]
1344 if None in bpbase:
1335 if None in bpbase:
1345 # emulate the old behavior, showing "nothing to rebase" (a better
1336 # emulate the old behavior, showing "nothing to rebase" (a better
1346 # behavior may be abort with "cannot find branching point" error)
1337 # behavior may be abort with "cannot find branching point" error)
1347 bpbase.clear()
1338 bpbase.clear()
1348 for bp, bs in bpbase.items(): # calculate roots
1339 for bp, bs in bpbase.items(): # calculate roots
1349 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1340 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1350
1341
1351 rebaseset = repo.revs(b'%ld::', roots)
1342 rebaseset = repo.revs(b'%ld::', roots)
1352
1343
1353 if not rebaseset:
1344 if not rebaseset:
1354 # transform to list because smartsets are not comparable to
1345 # transform to list because smartsets are not comparable to
1355 # lists. This should be improved to honor laziness of
1346 # lists. This should be improved to honor laziness of
1356 # smartset.
1347 # smartset.
1357 if list(base) == [dest.rev()]:
1348 if list(base) == [dest.rev()]:
1358 if basef:
1349 if basef:
1359 ui.status(
1350 ui.status(
1360 _(
1351 _(
1361 b'nothing to rebase - %s is both "base"'
1352 b'nothing to rebase - %s is both "base"'
1362 b' and destination\n'
1353 b' and destination\n'
1363 )
1354 )
1364 % dest
1355 % dest
1365 )
1356 )
1366 else:
1357 else:
1367 ui.status(
1358 ui.status(
1368 _(
1359 _(
1369 b'nothing to rebase - working directory '
1360 b'nothing to rebase - working directory '
1370 b'parent is also destination\n'
1361 b'parent is also destination\n'
1371 )
1362 )
1372 )
1363 )
1373 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1364 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1374 if basef:
1365 if basef:
1375 ui.status(
1366 ui.status(
1376 _(
1367 _(
1377 b'nothing to rebase - "base" %s is '
1368 b'nothing to rebase - "base" %s is '
1378 b'already an ancestor of destination '
1369 b'already an ancestor of destination '
1379 b'%s\n'
1370 b'%s\n'
1380 )
1371 )
1381 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1372 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1382 )
1373 )
1383 else:
1374 else:
1384 ui.status(
1375 ui.status(
1385 _(
1376 _(
1386 b'nothing to rebase - working '
1377 b'nothing to rebase - working '
1387 b'directory parent is already an '
1378 b'directory parent is already an '
1388 b'ancestor of destination %s\n'
1379 b'ancestor of destination %s\n'
1389 )
1380 )
1390 % dest
1381 % dest
1391 )
1382 )
1392 else: # can it happen?
1383 else: # can it happen?
1393 ui.status(
1384 ui.status(
1394 _(b'nothing to rebase from %s to %s\n')
1385 _(b'nothing to rebase from %s to %s\n')
1395 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1386 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1396 )
1387 )
1397 return None
1388 return None
1398
1389
1399 if wdirrev in rebaseset:
1390 if wdirrev in rebaseset:
1400 raise error.InputError(_(b'cannot rebase the working copy'))
1391 raise error.InputError(_(b'cannot rebase the working copy'))
1401 rebasingwcp = repo[b'.'].rev() in rebaseset
1392 rebasingwcp = repo[b'.'].rev() in rebaseset
1402 ui.log(
1393 ui.log(
1403 b"rebase",
1394 b"rebase",
1404 b"rebasing working copy parent: %r\n",
1395 b"rebasing working copy parent: %r\n",
1405 rebasingwcp,
1396 rebasingwcp,
1406 rebase_rebasing_wcp=rebasingwcp,
1397 rebase_rebasing_wcp=rebasingwcp,
1407 )
1398 )
1408 if inmemory and rebasingwcp:
1399 if inmemory and rebasingwcp:
1409 # Check these since we did not before.
1400 # Check these since we did not before.
1410 cmdutil.checkunfinished(repo)
1401 cmdutil.checkunfinished(repo)
1411 cmdutil.bailifchanged(repo)
1402 cmdutil.bailifchanged(repo)
1412
1403
1413 if not destf:
1404 if not destf:
1414 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1405 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1415 destf = bytes(dest)
1406 destf = bytes(dest)
1416
1407
1417 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1408 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1418 alias = {b'ALLSRC': allsrc}
1409 alias = {b'ALLSRC': allsrc}
1419
1410
1420 if dest is None:
1411 if dest is None:
1421 try:
1412 try:
1422 # fast path: try to resolve dest without SRC alias
1413 # fast path: try to resolve dest without SRC alias
1423 dest = scmutil.revsingle(repo, destf, localalias=alias)
1414 dest = scmutil.revsingle(repo, destf, localalias=alias)
1424 except error.RepoLookupError:
1415 except error.RepoLookupError:
1425 # multi-dest path: resolve dest for each SRC separately
1416 # multi-dest path: resolve dest for each SRC separately
1426 destmap = {}
1417 destmap = {}
1427 for r in rebaseset:
1418 for r in rebaseset:
1428 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1419 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1429 # use repo.anyrevs instead of scmutil.revsingle because we
1420 # use repo.anyrevs instead of scmutil.revsingle because we
1430 # don't want to abort if destset is empty.
1421 # don't want to abort if destset is empty.
1431 destset = repo.anyrevs([destf], user=True, localalias=alias)
1422 destset = repo.anyrevs([destf], user=True, localalias=alias)
1432 size = len(destset)
1423 size = len(destset)
1433 if size == 1:
1424 if size == 1:
1434 destmap[r] = destset.first()
1425 destmap[r] = destset.first()
1435 elif size == 0:
1426 elif size == 0:
1436 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1427 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1437 else:
1428 else:
1438 raise error.InputError(
1429 raise error.InputError(
1439 _(b'rebase destination for %s is not unique') % repo[r]
1430 _(b'rebase destination for %s is not unique') % repo[r]
1440 )
1431 )
1441
1432
1442 if dest is not None:
1433 if dest is not None:
1443 # single-dest case: assign dest to each rev in rebaseset
1434 # single-dest case: assign dest to each rev in rebaseset
1444 destrev = dest.rev()
1435 destrev = dest.rev()
1445 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1436 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1446
1437
1447 if not destmap:
1438 if not destmap:
1448 ui.status(_(b'nothing to rebase - empty destination\n'))
1439 ui.status(_(b'nothing to rebase - empty destination\n'))
1449 return None
1440 return None
1450
1441
1451 return destmap
1442 return destmap
1452
1443
1453
1444
1454 def externalparent(repo, state, destancestors):
1445 def externalparent(repo, state, destancestors):
1455 """Return the revision that should be used as the second parent
1446 """Return the revision that should be used as the second parent
1456 when the revisions in state is collapsed on top of destancestors.
1447 when the revisions in state is collapsed on top of destancestors.
1457 Abort if there is more than one parent.
1448 Abort if there is more than one parent.
1458 """
1449 """
1459 parents = set()
1450 parents = set()
1460 source = min(state)
1451 source = min(state)
1461 for rev in state:
1452 for rev in state:
1462 if rev == source:
1453 if rev == source:
1463 continue
1454 continue
1464 for p in repo[rev].parents():
1455 for p in repo[rev].parents():
1465 if p.rev() not in state and p.rev() not in destancestors:
1456 if p.rev() not in state and p.rev() not in destancestors:
1466 parents.add(p.rev())
1457 parents.add(p.rev())
1467 if not parents:
1458 if not parents:
1468 return nullrev
1459 return nullrev
1469 if len(parents) == 1:
1460 if len(parents) == 1:
1470 return parents.pop()
1461 return parents.pop()
1471 raise error.StateError(
1462 raise error.StateError(
1472 _(
1463 _(
1473 b'unable to collapse on top of %d, there is more '
1464 b'unable to collapse on top of %d, there is more '
1474 b'than one external parent: %s'
1465 b'than one external parent: %s'
1475 )
1466 )
1476 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1467 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1477 )
1468 )
1478
1469
1479
1470
1480 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1471 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1481 """Commit the memory changes with parents p1 and p2.
1472 """Commit the memory changes with parents p1 and p2.
1482 Return node of committed revision."""
1473 Return node of committed revision."""
1483 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1474 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1484 # ``branch`` (used when passing ``--keepbranches``).
1475 # ``branch`` (used when passing ``--keepbranches``).
1485 branch = None
1476 branch = None
1486 if b'branch' in extra:
1477 if b'branch' in extra:
1487 branch = extra[b'branch']
1478 branch = extra[b'branch']
1488
1479
1489 # FIXME: We call _compact() because it's required to correctly detect
1480 # FIXME: We call _compact() because it's required to correctly detect
1490 # changed files. This was added to fix a regression shortly before the 5.5
1481 # changed files. This was added to fix a regression shortly before the 5.5
1491 # release. A proper fix will be done in the default branch.
1482 # release. A proper fix will be done in the default branch.
1492 wctx._compact()
1483 wctx._compact()
1493 memctx = wctx.tomemctx(
1484 memctx = wctx.tomemctx(
1494 commitmsg,
1485 commitmsg,
1495 date=date,
1486 date=date,
1496 extra=extra,
1487 extra=extra,
1497 user=user,
1488 user=user,
1498 branch=branch,
1489 branch=branch,
1499 editor=editor,
1490 editor=editor,
1500 )
1491 )
1501 if memctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1492 if memctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1502 return None
1493 return None
1503 commitres = repo.commitctx(memctx)
1494 commitres = repo.commitctx(memctx)
1504 wctx.clean() # Might be reused
1495 wctx.clean() # Might be reused
1505 return commitres
1496 return commitres
1506
1497
1507
1498
1508 def commitnode(repo, editor, extra, user, date, commitmsg):
1499 def commitnode(repo, editor, extra, user, date, commitmsg):
1509 """Commit the wd changes with parents p1 and p2.
1500 """Commit the wd changes with parents p1 and p2.
1510 Return node of committed revision."""
1501 Return node of committed revision."""
1511 tr = util.nullcontextmanager
1502 tr = util.nullcontextmanager
1512 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1503 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1513 tr = lambda: repo.transaction(b'rebase')
1504 tr = lambda: repo.transaction(b'rebase')
1514 with tr():
1505 with tr():
1515 # Commit might fail if unresolved files exist
1506 # Commit might fail if unresolved files exist
1516 newnode = repo.commit(
1507 newnode = repo.commit(
1517 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1508 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1518 )
1509 )
1519
1510
1520 repo.dirstate.setbranch(
1511 repo.dirstate.setbranch(
1521 repo[newnode].branch(), repo.currenttransaction()
1512 repo[newnode].branch(), repo.currenttransaction()
1522 )
1513 )
1523 return newnode
1514 return newnode
1524
1515
1525
1516
1526 def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
1517 def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
1527 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1518 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1528 # Merge phase
1519 # Merge phase
1529 # Update to destination and merge it with local
1520 # Update to destination and merge it with local
1530 p1ctx = repo[p1]
1521 p1ctx = repo[p1]
1531 if wctx.isinmemory():
1522 if wctx.isinmemory():
1532 wctx.setbase(p1ctx)
1523 wctx.setbase(p1ctx)
1533 scope = util.nullcontextmanager
1524 scope = util.nullcontextmanager
1534 else:
1525 else:
1535 if repo[b'.'].rev() != p1:
1526 if repo[b'.'].rev() != p1:
1536 repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
1527 repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
1537 mergemod.clean_update(p1ctx)
1528 mergemod.clean_update(p1ctx)
1538 else:
1529 else:
1539 repo.ui.debug(b" already in destination\n")
1530 repo.ui.debug(b" already in destination\n")
1540 scope = lambda: repo.dirstate.changing_parents(repo)
1531 scope = lambda: repo.dirstate.changing_parents(repo)
1541 # This is, alas, necessary to invalidate workingctx's manifest cache,
1532 # This is, alas, necessary to invalidate workingctx's manifest cache,
1542 # as well as other data we litter on it in other places.
1533 # as well as other data we litter on it in other places.
1543 wctx = repo[None]
1534 wctx = repo[None]
1544 repo.dirstate.write(repo.currenttransaction())
1535 repo.dirstate.write(repo.currenttransaction())
1545 ctx = repo[rev]
1536 ctx = repo[rev]
1546 repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
1537 repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
1547 if base is not None:
1538 if base is not None:
1548 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1539 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1549
1540
1550 with scope():
1541 with scope():
1551 # See explanation in merge.graft()
1542 # See explanation in merge.graft()
1552 mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
1543 mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
1553 stats = mergemod._update(
1544 stats = mergemod._update(
1554 repo,
1545 repo,
1555 rev,
1546 rev,
1556 branchmerge=True,
1547 branchmerge=True,
1557 force=True,
1548 force=True,
1558 ancestor=base,
1549 ancestor=base,
1559 mergeancestor=mergeancestor,
1550 mergeancestor=mergeancestor,
1560 labels=[b'dest', b'source', b'parent of source'],
1551 labels=[b'dest', b'source', b'parent of source'],
1561 wc=wctx,
1552 wc=wctx,
1562 )
1553 )
1563 wctx.setparents(p1ctx.node(), repo[p2].node())
1554 wctx.setparents(p1ctx.node(), repo[p2].node())
1564 if collapse:
1555 if collapse:
1565 copies.graftcopies(wctx, ctx, p1ctx)
1556 copies.graftcopies(wctx, ctx, p1ctx)
1566 else:
1557 else:
1567 # If we're not using --collapse, we need to
1558 # If we're not using --collapse, we need to
1568 # duplicate copies between the revision we're
1559 # duplicate copies between the revision we're
1569 # rebasing and its first parent.
1560 # rebasing and its first parent.
1570 copies.graftcopies(wctx, ctx, ctx.p1())
1561 copies.graftcopies(wctx, ctx, ctx.p1())
1571
1562
1572 if stats.unresolvedcount > 0:
1563 if stats.unresolvedcount > 0:
1573 if wctx.isinmemory():
1564 if wctx.isinmemory():
1574 raise error.InMemoryMergeConflictsError()
1565 raise error.InMemoryMergeConflictsError()
1575 else:
1566 else:
1576 raise error.ConflictResolutionRequired(b'rebase')
1567 raise error.ConflictResolutionRequired(b'rebase')
1577
1568
1578
1569
1579 def adjustdest(repo, rev, destmap, state, skipped):
1570 def adjustdest(repo, rev, destmap, state, skipped):
1580 r"""adjust rebase destination given the current rebase state
1571 r"""adjust rebase destination given the current rebase state
1581
1572
1582 rev is what is being rebased. Return a list of two revs, which are the
1573 rev is what is being rebased. Return a list of two revs, which are the
1583 adjusted destinations for rev's p1 and p2, respectively. If a parent is
1574 adjusted destinations for rev's p1 and p2, respectively. If a parent is
1584 nullrev, return dest without adjustment for it.
1575 nullrev, return dest without adjustment for it.
1585
1576
1586 For example, when doing rebasing B+E to F, C to G, rebase will first move B
1577 For example, when doing rebasing B+E to F, C to G, rebase will first move B
1587 to B1, and E's destination will be adjusted from F to B1.
1578 to B1, and E's destination will be adjusted from F to B1.
1588
1579
1589 B1 <- written during rebasing B
1580 B1 <- written during rebasing B
1590 |
1581 |
1591 F <- original destination of B, E
1582 F <- original destination of B, E
1592 |
1583 |
1593 | E <- rev, which is being rebased
1584 | E <- rev, which is being rebased
1594 | |
1585 | |
1595 | D <- prev, one parent of rev being checked
1586 | D <- prev, one parent of rev being checked
1596 | |
1587 | |
1597 | x <- skipped, ex. no successor or successor in (::dest)
1588 | x <- skipped, ex. no successor or successor in (::dest)
1598 | |
1589 | |
1599 | C <- rebased as C', different destination
1590 | C <- rebased as C', different destination
1600 | |
1591 | |
1601 | B <- rebased as B1 C'
1592 | B <- rebased as B1 C'
1602 |/ |
1593 |/ |
1603 A G <- destination of C, different
1594 A G <- destination of C, different
1604
1595
1605 Another example about merge changeset, rebase -r C+G+H -d K, rebase will
1596 Another example about merge changeset, rebase -r C+G+H -d K, rebase will
1606 first move C to C1, G to G1, and when it's checking H, the adjusted
1597 first move C to C1, G to G1, and when it's checking H, the adjusted
1607 destinations will be [C1, G1].
1598 destinations will be [C1, G1].
1608
1599
1609 H C1 G1
1600 H C1 G1
1610 /| | /
1601 /| | /
1611 F G |/
1602 F G |/
1612 K | | -> K
1603 K | | -> K
1613 | C D |
1604 | C D |
1614 | |/ |
1605 | |/ |
1615 | B | ...
1606 | B | ...
1616 |/ |/
1607 |/ |/
1617 A A
1608 A A
1618
1609
1619 Besides, adjust dest according to existing rebase information. For example,
1610 Besides, adjust dest according to existing rebase information. For example,
1620
1611
1621 B C D B needs to be rebased on top of C, C needs to be rebased on top
1612 B C D B needs to be rebased on top of C, C needs to be rebased on top
1622 \|/ of D. We will rebase C first.
1613 \|/ of D. We will rebase C first.
1623 A
1614 A
1624
1615
1625 C' After rebasing C, when considering B's destination, use C'
1616 C' After rebasing C, when considering B's destination, use C'
1626 | instead of the original C.
1617 | instead of the original C.
1627 B D
1618 B D
1628 \ /
1619 \ /
1629 A
1620 A
1630 """
1621 """
1631 # pick already rebased revs with same dest from state as interesting source
1622 # pick already rebased revs with same dest from state as interesting source
1632 dest = destmap[rev]
1623 dest = destmap[rev]
1633 source = [
1624 source = [
1634 s
1625 s
1635 for s, d in state.items()
1626 for s, d in state.items()
1636 if d > 0 and destmap[s] == dest and s not in skipped
1627 if d > 0 and destmap[s] == dest and s not in skipped
1637 ]
1628 ]
1638
1629
1639 result = []
1630 result = []
1640 for prev in repo.changelog.parentrevs(rev):
1631 for prev in repo.changelog.parentrevs(rev):
1641 adjusted = dest
1632 adjusted = dest
1642 if prev != nullrev:
1633 if prev != nullrev:
1643 candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
1634 candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
1644 if candidate is not None:
1635 if candidate is not None:
1645 adjusted = state[candidate]
1636 adjusted = state[candidate]
1646 if adjusted == dest and dest in state:
1637 if adjusted == dest and dest in state:
1647 adjusted = state[dest]
1638 adjusted = state[dest]
1648 if adjusted == revtodo:
1639 if adjusted == revtodo:
1649 # sortsource should produce an order that makes this impossible
1640 # sortsource should produce an order that makes this impossible
1650 raise error.ProgrammingError(
1641 raise error.ProgrammingError(
1651 b'rev %d should be rebased already at this time' % dest
1642 b'rev %d should be rebased already at this time' % dest
1652 )
1643 )
1653 result.append(adjusted)
1644 result.append(adjusted)
1654 return result
1645 return result
1655
1646
1656
1647
1657 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1648 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1658 """
1649 """
1659 Abort if rebase will create divergence or rebase is noop because of markers
1650 Abort if rebase will create divergence or rebase is noop because of markers
1660
1651
1661 `rebaseobsrevs`: set of obsolete revision in source
1652 `rebaseobsrevs`: set of obsolete revision in source
1662 `rebaseobsskipped`: set of revisions from source skipped because they have
1653 `rebaseobsskipped`: set of revisions from source skipped because they have
1663 successors in destination or no non-obsolete successor.
1654 successors in destination or no non-obsolete successor.
1664 """
1655 """
1665 # Obsolete node with successors not in dest leads to divergence
1656 # Obsolete node with successors not in dest leads to divergence
1666 divergenceok = obsolete.isenabled(repo, obsolete.allowdivergenceopt)
1657 divergenceok = obsolete.isenabled(repo, obsolete.allowdivergenceopt)
1667 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1658 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1668
1659
1669 if divergencebasecandidates and not divergenceok:
1660 if divergencebasecandidates and not divergenceok:
1670 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1661 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1671 msg = _(b"this rebase will cause divergences from: %s")
1662 msg = _(b"this rebase will cause divergences from: %s")
1672 h = _(
1663 h = _(
1673 b"to force the rebase please set "
1664 b"to force the rebase please set "
1674 b"experimental.evolution.allowdivergence=True"
1665 b"experimental.evolution.allowdivergence=True"
1675 )
1666 )
1676 raise error.StateError(msg % (b",".join(divhashes),), hint=h)
1667 raise error.StateError(msg % (b",".join(divhashes),), hint=h)
1677
1668
1678
1669
1679 def successorrevs(unfi, rev):
1670 def successorrevs(unfi, rev):
1680 """yield revision numbers for successors of rev"""
1671 """yield revision numbers for successors of rev"""
1681 assert unfi.filtername is None
1672 assert unfi.filtername is None
1682 get_rev = unfi.changelog.index.get_rev
1673 get_rev = unfi.changelog.index.get_rev
1683 for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
1674 for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
1684 r = get_rev(s)
1675 r = get_rev(s)
1685 if r is not None:
1676 if r is not None:
1686 yield r
1677 yield r
1687
1678
1688
1679
1689 def defineparents(repo, rev, destmap, state, skipped, obsskipped):
1680 def defineparents(repo, rev, destmap, state, skipped, obsskipped):
1690 """Return new parents and optionally a merge base for rev being rebased
1681 """Return new parents and optionally a merge base for rev being rebased
1691
1682
1692 The destination specified by "dest" cannot always be used directly because
1683 The destination specified by "dest" cannot always be used directly because
1693 previously rebase result could affect destination. For example,
1684 previously rebase result could affect destination. For example,
1694
1685
1695 D E rebase -r C+D+E -d B
1686 D E rebase -r C+D+E -d B
1696 |/ C will be rebased to C'
1687 |/ C will be rebased to C'
1697 B C D's new destination will be C' instead of B
1688 B C D's new destination will be C' instead of B
1698 |/ E's new destination will be C' instead of B
1689 |/ E's new destination will be C' instead of B
1699 A
1690 A
1700
1691
1701 The new parents of a merge is slightly more complicated. See the comment
1692 The new parents of a merge is slightly more complicated. See the comment
1702 block below.
1693 block below.
1703 """
1694 """
1704 # use unfiltered changelog since successorrevs may return filtered nodes
1695 # use unfiltered changelog since successorrevs may return filtered nodes
1705 assert repo.filtername is None
1696 assert repo.filtername is None
1706 cl = repo.changelog
1697 cl = repo.changelog
1707 isancestor = cl.isancestorrev
1698 isancestor = cl.isancestorrev
1708
1699
1709 dest = destmap[rev]
1700 dest = destmap[rev]
1710 oldps = repo.changelog.parentrevs(rev) # old parents
1701 oldps = repo.changelog.parentrevs(rev) # old parents
1711 newps = [nullrev, nullrev] # new parents
1702 newps = [nullrev, nullrev] # new parents
1712 dests = adjustdest(repo, rev, destmap, state, skipped)
1703 dests = adjustdest(repo, rev, destmap, state, skipped)
1713 bases = list(oldps) # merge base candidates, initially just old parents
1704 bases = list(oldps) # merge base candidates, initially just old parents
1714
1705
1715 if all(r == nullrev for r in oldps[1:]):
1706 if all(r == nullrev for r in oldps[1:]):
1716 # For non-merge changeset, just move p to adjusted dest as requested.
1707 # For non-merge changeset, just move p to adjusted dest as requested.
1717 newps[0] = dests[0]
1708 newps[0] = dests[0]
1718 else:
1709 else:
1719 # For merge changeset, if we move p to dests[i] unconditionally, both
1710 # For merge changeset, if we move p to dests[i] unconditionally, both
1720 # parents may change and the end result looks like "the merge loses a
1711 # parents may change and the end result looks like "the merge loses a
1721 # parent", which is a surprise. This is a limit because "--dest" only
1712 # parent", which is a surprise. This is a limit because "--dest" only
1722 # accepts one dest per src.
1713 # accepts one dest per src.
1723 #
1714 #
1724 # Therefore, only move p with reasonable conditions (in this order):
1715 # Therefore, only move p with reasonable conditions (in this order):
1725 # 1. use dest, if dest is a descendent of (p or one of p's successors)
1716 # 1. use dest, if dest is a descendent of (p or one of p's successors)
1726 # 2. use p's rebased result, if p is rebased (state[p] > 0)
1717 # 2. use p's rebased result, if p is rebased (state[p] > 0)
1727 #
1718 #
1728 # Comparing with adjustdest, the logic here does some additional work:
1719 # Comparing with adjustdest, the logic here does some additional work:
1729 # 1. decide which parents will not be moved towards dest
1720 # 1. decide which parents will not be moved towards dest
1730 # 2. if the above decision is "no", should a parent still be moved
1721 # 2. if the above decision is "no", should a parent still be moved
1731 # because it was rebased?
1722 # because it was rebased?
1732 #
1723 #
1733 # For example:
1724 # For example:
1734 #
1725 #
1735 # C # "rebase -r C -d D" is an error since none of the parents
1726 # C # "rebase -r C -d D" is an error since none of the parents
1736 # /| # can be moved. "rebase -r B+C -d D" will move C's parent
1727 # /| # can be moved. "rebase -r B+C -d D" will move C's parent
1737 # A B D # B (using rule "2."), since B will be rebased.
1728 # A B D # B (using rule "2."), since B will be rebased.
1738 #
1729 #
1739 # The loop tries to be not rely on the fact that a Mercurial node has
1730 # The loop tries to be not rely on the fact that a Mercurial node has
1740 # at most 2 parents.
1731 # at most 2 parents.
1741 for i, p in enumerate(oldps):
1732 for i, p in enumerate(oldps):
1742 np = p # new parent
1733 np = p # new parent
1743 if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
1734 if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
1744 np = dests[i]
1735 np = dests[i]
1745 elif p in state and state[p] > 0:
1736 elif p in state and state[p] > 0:
1746 np = state[p]
1737 np = state[p]
1747
1738
1748 # If one parent becomes an ancestor of the other, drop the ancestor
1739 # If one parent becomes an ancestor of the other, drop the ancestor
1749 for j, x in enumerate(newps[:i]):
1740 for j, x in enumerate(newps[:i]):
1750 if x == nullrev:
1741 if x == nullrev:
1751 continue
1742 continue
1752 if isancestor(np, x): # CASE-1
1743 if isancestor(np, x): # CASE-1
1753 np = nullrev
1744 np = nullrev
1754 elif isancestor(x, np): # CASE-2
1745 elif isancestor(x, np): # CASE-2
1755 newps[j] = np
1746 newps[j] = np
1756 np = nullrev
1747 np = nullrev
1757 # New parents forming an ancestor relationship does not
1748 # New parents forming an ancestor relationship does not
1758 # mean the old parents have a similar relationship. Do not
1749 # mean the old parents have a similar relationship. Do not
1759 # set bases[x] to nullrev.
1750 # set bases[x] to nullrev.
1760 bases[j], bases[i] = bases[i], bases[j]
1751 bases[j], bases[i] = bases[i], bases[j]
1761
1752
1762 newps[i] = np
1753 newps[i] = np
1763
1754
1764 # "rebasenode" updates to new p1, and the old p1 will be used as merge
1755 # "rebasenode" updates to new p1, and the old p1 will be used as merge
1765 # base. If only p2 changes, merging using unchanged p1 as merge base is
1756 # base. If only p2 changes, merging using unchanged p1 as merge base is
1766 # suboptimal. Therefore swap parents to make the merge sane.
1757 # suboptimal. Therefore swap parents to make the merge sane.
1767 if newps[1] != nullrev and oldps[0] == newps[0]:
1758 if newps[1] != nullrev and oldps[0] == newps[0]:
1768 assert len(newps) == 2 and len(oldps) == 2
1759 assert len(newps) == 2 and len(oldps) == 2
1769 newps.reverse()
1760 newps.reverse()
1770 bases.reverse()
1761 bases.reverse()
1771
1762
1772 # No parent change might be an error because we fail to make rev a
1763 # No parent change might be an error because we fail to make rev a
1773 # descendent of requested dest. This can happen, for example:
1764 # descendent of requested dest. This can happen, for example:
1774 #
1765 #
1775 # C # rebase -r C -d D
1766 # C # rebase -r C -d D
1776 # /| # None of A and B will be changed to D and rebase fails.
1767 # /| # None of A and B will be changed to D and rebase fails.
1777 # A B D
1768 # A B D
1778 if set(newps) == set(oldps) and dest not in newps:
1769 if set(newps) == set(oldps) and dest not in newps:
1779 raise error.InputError(
1770 raise error.InputError(
1780 _(
1771 _(
1781 b'cannot rebase %d:%s without '
1772 b'cannot rebase %d:%s without '
1782 b'moving at least one of its parents'
1773 b'moving at least one of its parents'
1783 )
1774 )
1784 % (rev, repo[rev])
1775 % (rev, repo[rev])
1785 )
1776 )
1786
1777
1787 # Source should not be ancestor of dest. The check here guarantees it's
1778 # Source should not be ancestor of dest. The check here guarantees it's
1788 # impossible. With multi-dest, the initial check does not cover complex
1779 # impossible. With multi-dest, the initial check does not cover complex
1789 # cases since we don't have abstractions to dry-run rebase cheaply.
1780 # cases since we don't have abstractions to dry-run rebase cheaply.
1790 if any(p != nullrev and isancestor(rev, p) for p in newps):
1781 if any(p != nullrev and isancestor(rev, p) for p in newps):
1791 raise error.InputError(_(b'source is ancestor of destination'))
1782 raise error.InputError(_(b'source is ancestor of destination'))
1792
1783
1793 # Check if the merge will contain unwanted changes. That may happen if
1784 # Check if the merge will contain unwanted changes. That may happen if
1794 # there are multiple special (non-changelog ancestor) merge bases, which
1785 # there are multiple special (non-changelog ancestor) merge bases, which
1795 # cannot be handled well by the 3-way merge algorithm. For example:
1786 # cannot be handled well by the 3-way merge algorithm. For example:
1796 #
1787 #
1797 # F
1788 # F
1798 # /|
1789 # /|
1799 # D E # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
1790 # D E # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
1800 # | | # as merge base, the difference between D and F will include
1791 # | | # as merge base, the difference between D and F will include
1801 # B C # C, so the rebased F will contain C surprisingly. If "E" was
1792 # B C # C, so the rebased F will contain C surprisingly. If "E" was
1802 # |/ # chosen, the rebased F will contain B.
1793 # |/ # chosen, the rebased F will contain B.
1803 # A Z
1794 # A Z
1804 #
1795 #
1805 # But our merge base candidates (D and E in above case) could still be
1796 # But our merge base candidates (D and E in above case) could still be
1806 # better than the default (ancestor(F, Z) == null). Therefore still
1797 # better than the default (ancestor(F, Z) == null). Therefore still
1807 # pick one (so choose p1 above).
1798 # pick one (so choose p1 above).
1808 if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
1799 if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
1809 unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
1800 unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
1810 for i, base in enumerate(bases):
1801 for i, base in enumerate(bases):
1811 if base == nullrev or base in newps:
1802 if base == nullrev or base in newps:
1812 continue
1803 continue
1813 # Revisions in the side (not chosen as merge base) branch that
1804 # Revisions in the side (not chosen as merge base) branch that
1814 # might contain "surprising" contents
1805 # might contain "surprising" contents
1815 other_bases = set(bases) - {base}
1806 other_bases = set(bases) - {base}
1816 siderevs = list(
1807 siderevs = list(
1817 repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
1808 repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
1818 )
1809 )
1819
1810
1820 # If those revisions are covered by rebaseset, the result is good.
1811 # If those revisions are covered by rebaseset, the result is good.
1821 # A merge in rebaseset would be considered to cover its ancestors.
1812 # A merge in rebaseset would be considered to cover its ancestors.
1822 if siderevs:
1813 if siderevs:
1823 rebaseset = [
1814 rebaseset = [
1824 r for r, d in state.items() if d > 0 and r not in obsskipped
1815 r for r, d in state.items() if d > 0 and r not in obsskipped
1825 ]
1816 ]
1826 merges = [
1817 merges = [
1827 r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
1818 r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
1828 ]
1819 ]
1829 unwanted[i] = list(
1820 unwanted[i] = list(
1830 repo.revs(
1821 repo.revs(
1831 b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
1822 b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
1832 )
1823 )
1833 )
1824 )
1834
1825
1835 if any(revs is not None for revs in unwanted):
1826 if any(revs is not None for revs in unwanted):
1836 # Choose a merge base that has a minimal number of unwanted revs.
1827 # Choose a merge base that has a minimal number of unwanted revs.
1837 l, i = min(
1828 l, i = min(
1838 (len(revs), i)
1829 (len(revs), i)
1839 for i, revs in enumerate(unwanted)
1830 for i, revs in enumerate(unwanted)
1840 if revs is not None
1831 if revs is not None
1841 )
1832 )
1842
1833
1843 # The merge will include unwanted revisions. Abort now. Revisit this if
1834 # The merge will include unwanted revisions. Abort now. Revisit this if
1844 # we have a more advanced merge algorithm that handles multiple bases.
1835 # we have a more advanced merge algorithm that handles multiple bases.
1845 if l > 0:
1836 if l > 0:
1846 unwanteddesc = _(b' or ').join(
1837 unwanteddesc = _(b' or ').join(
1847 (
1838 (
1848 b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
1839 b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
1849 for revs in unwanted
1840 for revs in unwanted
1850 if revs is not None
1841 if revs is not None
1851 )
1842 )
1852 )
1843 )
1853 raise error.InputError(
1844 raise error.InputError(
1854 _(b'rebasing %d:%s will include unwanted changes from %s')
1845 _(b'rebasing %d:%s will include unwanted changes from %s')
1855 % (rev, repo[rev], unwanteddesc)
1846 % (rev, repo[rev], unwanteddesc)
1856 )
1847 )
1857
1848
1858 # newps[0] should match merge base if possible. Currently, if newps[i]
1849 # newps[0] should match merge base if possible. Currently, if newps[i]
1859 # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
1850 # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
1860 # the other's ancestor. In that case, it's fine to not swap newps here.
1851 # the other's ancestor. In that case, it's fine to not swap newps here.
1861 # (see CASE-1 and CASE-2 above)
1852 # (see CASE-1 and CASE-2 above)
1862 if i != 0:
1853 if i != 0:
1863 if newps[i] != nullrev:
1854 if newps[i] != nullrev:
1864 newps[0], newps[i] = newps[i], newps[0]
1855 newps[0], newps[i] = newps[i], newps[0]
1865 bases[0], bases[i] = bases[i], bases[0]
1856 bases[0], bases[i] = bases[i], bases[0]
1866
1857
1867 # "rebasenode" updates to new p1, use the corresponding merge base.
1858 # "rebasenode" updates to new p1, use the corresponding merge base.
1868 base = bases[0]
1859 base = bases[0]
1869
1860
1870 repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
1861 repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
1871
1862
1872 return newps[0], newps[1], base
1863 return newps[0], newps[1], base
1873
1864
1874
1865
1875 def isagitpatch(repo, patchname):
1866 def isagitpatch(repo, patchname):
1876 """Return true if the given patch is in git format"""
1867 """Return true if the given patch is in git format"""
1877 mqpatch = os.path.join(repo.mq.path, patchname)
1868 mqpatch = os.path.join(repo.mq.path, patchname)
1878 for line in patch.linereader(open(mqpatch, b'rb')):
1869 for line in patch.linereader(open(mqpatch, b'rb')):
1879 if line.startswith(b'diff --git'):
1870 if line.startswith(b'diff --git'):
1880 return True
1871 return True
1881 return False
1872 return False
1882
1873
1883
1874
1884 def updatemq(repo, state, skipped, **opts):
1875 def updatemq(repo, state, skipped, **opts):
1885 """Update rebased mq patches - finalize and then import them"""
1876 """Update rebased mq patches - finalize and then import them"""
1886 mqrebase = {}
1877 mqrebase = {}
1887 mq = repo.mq
1878 mq = repo.mq
1888 original_series = mq.fullseries[:]
1879 original_series = mq.fullseries[:]
1889 skippedpatches = set()
1880 skippedpatches = set()
1890
1881
1891 for p in mq.applied:
1882 for p in mq.applied:
1892 rev = repo[p.node].rev()
1883 rev = repo[p.node].rev()
1893 if rev in state:
1884 if rev in state:
1894 repo.ui.debug(
1885 repo.ui.debug(
1895 b'revision %d is an mq patch (%s), finalize it.\n'
1886 b'revision %d is an mq patch (%s), finalize it.\n'
1896 % (rev, p.name)
1887 % (rev, p.name)
1897 )
1888 )
1898 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1889 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1899 else:
1890 else:
1900 # Applied but not rebased, not sure this should happen
1891 # Applied but not rebased, not sure this should happen
1901 skippedpatches.add(p.name)
1892 skippedpatches.add(p.name)
1902
1893
1903 if mqrebase:
1894 if mqrebase:
1904 mq.finish(repo, mqrebase.keys())
1895 mq.finish(repo, mqrebase.keys())
1905
1896
1906 # We must start import from the newest revision
1897 # We must start import from the newest revision
1907 for rev in sorted(mqrebase, reverse=True):
1898 for rev in sorted(mqrebase, reverse=True):
1908 if rev not in skipped:
1899 if rev not in skipped:
1909 name, isgit = mqrebase[rev]
1900 name, isgit = mqrebase[rev]
1910 repo.ui.note(
1901 repo.ui.note(
1911 _(b'updating mq patch %s to %d:%s\n')
1902 _(b'updating mq patch %s to %d:%s\n')
1912 % (name, state[rev], repo[state[rev]])
1903 % (name, state[rev], repo[state[rev]])
1913 )
1904 )
1914 mq.qimport(
1905 mq.qimport(
1915 repo,
1906 repo,
1916 (),
1907 (),
1917 patchname=name,
1908 patchname=name,
1918 git=isgit,
1909 git=isgit,
1919 rev=[b"%d" % state[rev]],
1910 rev=[b"%d" % state[rev]],
1920 )
1911 )
1921 else:
1912 else:
1922 # Rebased and skipped
1913 # Rebased and skipped
1923 skippedpatches.add(mqrebase[rev][0])
1914 skippedpatches.add(mqrebase[rev][0])
1924
1915
1925 # Patches were either applied and rebased and imported in
1916 # Patches were either applied and rebased and imported in
1926 # order, applied and removed or unapplied. Discard the removed
1917 # order, applied and removed or unapplied. Discard the removed
1927 # ones while preserving the original series order and guards.
1918 # ones while preserving the original series order and guards.
1928 newseries = [
1919 newseries = [
1929 s
1920 s
1930 for s in original_series
1921 for s in original_series
1931 if mq.guard_re.split(s, 1)[0] not in skippedpatches
1922 if mq.guard_re.split(s, 1)[0] not in skippedpatches
1932 ]
1923 ]
1933 mq.fullseries[:] = newseries
1924 mq.fullseries[:] = newseries
1934 mq.seriesdirty = True
1925 mq.seriesdirty = True
1935 mq.savedirty()
1926 mq.savedirty()
1936
1927
1937
1928
1938 def storecollapsemsg(repo, collapsemsg):
1929 def storecollapsemsg(repo, collapsemsg):
1939 """Store the collapse message to allow recovery"""
1930 """Store the collapse message to allow recovery"""
1940 collapsemsg = collapsemsg or b''
1931 collapsemsg = collapsemsg or b''
1941 f = repo.vfs(b"last-message.txt", b"w")
1932 f = repo.vfs(b"last-message.txt", b"w")
1942 f.write(b"%s\n" % collapsemsg)
1933 f.write(b"%s\n" % collapsemsg)
1943 f.close()
1934 f.close()
1944
1935
1945
1936
1946 def clearcollapsemsg(repo):
1937 def clearcollapsemsg(repo):
1947 """Remove collapse message file"""
1938 """Remove collapse message file"""
1948 repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1939 repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1949
1940
1950
1941
1951 def restorecollapsemsg(repo, isabort):
1942 def restorecollapsemsg(repo, isabort):
1952 """Restore previously stored collapse message"""
1943 """Restore previously stored collapse message"""
1953 try:
1944 try:
1954 f = repo.vfs(b"last-message.txt")
1945 f = repo.vfs(b"last-message.txt")
1955 collapsemsg = f.readline().strip()
1946 collapsemsg = f.readline().strip()
1956 f.close()
1947 f.close()
1957 except FileNotFoundError:
1948 except FileNotFoundError:
1958 if isabort:
1949 if isabort:
1959 # Oh well, just abort like normal
1950 # Oh well, just abort like normal
1960 collapsemsg = b''
1951 collapsemsg = b''
1961 else:
1952 else:
1962 raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
1953 raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
1963 return collapsemsg
1954 return collapsemsg
1964
1955
1965
1956
1966 def clearstatus(repo):
1957 def clearstatus(repo):
1967 """Remove the status files"""
1958 """Remove the status files"""
1968 # Make sure the active transaction won't write the state file
1959 # Make sure the active transaction won't write the state file
1969 tr = repo.currenttransaction()
1960 tr = repo.currenttransaction()
1970 if tr:
1961 if tr:
1971 tr.removefilegenerator(b'rebasestate')
1962 tr.removefilegenerator(b'rebasestate')
1972 repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1963 repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1973
1964
1974
1965
1975 def sortsource(destmap):
1966 def sortsource(destmap):
1976 """yield source revisions in an order that we only rebase things once
1967 """yield source revisions in an order that we only rebase things once
1977
1968
1978 If source and destination overlaps, we should filter out revisions
1969 If source and destination overlaps, we should filter out revisions
1979 depending on other revisions which hasn't been rebased yet.
1970 depending on other revisions which hasn't been rebased yet.
1980
1971
1981 Yield a sorted list of revisions each time.
1972 Yield a sorted list of revisions each time.
1982
1973
1983 For example, when rebasing A to B, B to C. This function yields [B], then
1974 For example, when rebasing A to B, B to C. This function yields [B], then
1984 [A], indicating B needs to be rebased first.
1975 [A], indicating B needs to be rebased first.
1985
1976
1986 Raise if there is a cycle so the rebase is impossible.
1977 Raise if there is a cycle so the rebase is impossible.
1987 """
1978 """
1988 srcset = set(destmap)
1979 srcset = set(destmap)
1989 while srcset:
1980 while srcset:
1990 srclist = sorted(srcset)
1981 srclist = sorted(srcset)
1991 result = []
1982 result = []
1992 for r in srclist:
1983 for r in srclist:
1993 if destmap[r] not in srcset:
1984 if destmap[r] not in srcset:
1994 result.append(r)
1985 result.append(r)
1995 if not result:
1986 if not result:
1996 raise error.InputError(_(b'source and destination form a cycle'))
1987 raise error.InputError(_(b'source and destination form a cycle'))
1997 srcset -= set(result)
1988 srcset -= set(result)
1998 yield result
1989 yield result
1999
1990
2000
1991
2001 def buildstate(repo, destmap, collapse):
1992 def buildstate(repo, destmap, collapse):
2002 """Define which revisions are going to be rebased and where
1993 """Define which revisions are going to be rebased and where
2003
1994
2004 repo: repo
1995 repo: repo
2005 destmap: {srcrev: destrev}
1996 destmap: {srcrev: destrev}
2006 """
1997 """
2007 rebaseset = destmap.keys()
1998 rebaseset = destmap.keys()
2008 originalwd = repo[b'.'].rev()
1999 originalwd = repo[b'.'].rev()
2009
2000
2010 # This check isn't strictly necessary, since mq detects commits over an
2001 # This check isn't strictly necessary, since mq detects commits over an
2011 # applied patch. But it prevents messing up the working directory when
2002 # applied patch. But it prevents messing up the working directory when
2012 # a partially completed rebase is blocked by mq.
2003 # a partially completed rebase is blocked by mq.
2013 if b'qtip' in repo.tags():
2004 if b'qtip' in repo.tags():
2014 mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
2005 mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
2015 if set(destmap.values()) & mqapplied:
2006 if set(destmap.values()) & mqapplied:
2016 raise error.StateError(_(b'cannot rebase onto an applied mq patch'))
2007 raise error.StateError(_(b'cannot rebase onto an applied mq patch'))
2017
2008
2018 # Get "cycle" error early by exhausting the generator.
2009 # Get "cycle" error early by exhausting the generator.
2019 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
2010 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
2020 if not sortedsrc:
2011 if not sortedsrc:
2021 raise error.InputError(_(b'no matching revisions'))
2012 raise error.InputError(_(b'no matching revisions'))
2022
2013
2023 # Only check the first batch of revisions to rebase not depending on other
2014 # Only check the first batch of revisions to rebase not depending on other
2024 # rebaseset. This means "source is ancestor of destination" for the second
2015 # rebaseset. This means "source is ancestor of destination" for the second
2025 # (and following) batches of revisions are not checked here. We rely on
2016 # (and following) batches of revisions are not checked here. We rely on
2026 # "defineparents" to do that check.
2017 # "defineparents" to do that check.
2027 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
2018 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
2028 if not roots:
2019 if not roots:
2029 raise error.InputError(_(b'no matching revisions'))
2020 raise error.InputError(_(b'no matching revisions'))
2030
2021
2031 def revof(r):
2022 def revof(r):
2032 return r.rev()
2023 return r.rev()
2033
2024
2034 roots = sorted(roots, key=revof)
2025 roots = sorted(roots, key=revof)
2035 state = dict.fromkeys(rebaseset, revtodo)
2026 state = dict.fromkeys(rebaseset, revtodo)
2036 emptyrebase = len(sortedsrc) == 1
2027 emptyrebase = len(sortedsrc) == 1
2037 for root in roots:
2028 for root in roots:
2038 dest = repo[destmap[root.rev()]]
2029 dest = repo[destmap[root.rev()]]
2039 commonbase = root.ancestor(dest)
2030 commonbase = root.ancestor(dest)
2040 if commonbase == root:
2031 if commonbase == root:
2041 raise error.InputError(_(b'source is ancestor of destination'))
2032 raise error.InputError(_(b'source is ancestor of destination'))
2042 if commonbase == dest:
2033 if commonbase == dest:
2043 wctx = repo[None]
2034 wctx = repo[None]
2044 if dest == wctx.p1():
2035 if dest == wctx.p1():
2045 # when rebasing to '.', it will use the current wd branch name
2036 # when rebasing to '.', it will use the current wd branch name
2046 samebranch = root.branch() == wctx.branch()
2037 samebranch = root.branch() == wctx.branch()
2047 else:
2038 else:
2048 samebranch = root.branch() == dest.branch()
2039 samebranch = root.branch() == dest.branch()
2049 if not collapse and samebranch and dest in root.parents():
2040 if not collapse and samebranch and dest in root.parents():
2050 # mark the revision as done by setting its new revision
2041 # mark the revision as done by setting its new revision
2051 # equal to its old (current) revisions
2042 # equal to its old (current) revisions
2052 state[root.rev()] = root.rev()
2043 state[root.rev()] = root.rev()
2053 repo.ui.debug(b'source is a child of destination\n')
2044 repo.ui.debug(b'source is a child of destination\n')
2054 continue
2045 continue
2055
2046
2056 emptyrebase = False
2047 emptyrebase = False
2057 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2048 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2058 if emptyrebase:
2049 if emptyrebase:
2059 return None
2050 return None
2060 for rev in sorted(state):
2051 for rev in sorted(state):
2061 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2052 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2062 # if all parents of this revision are done, then so is this revision
2053 # if all parents of this revision are done, then so is this revision
2063 if parents and all((state.get(p) == p for p in parents)):
2054 if parents and all((state.get(p) == p for p in parents)):
2064 state[rev] = rev
2055 state[rev] = rev
2065 return originalwd, destmap, state
2056 return originalwd, destmap, state
2066
2057
2067
2058
2068 def clearrebased(
2059 def clearrebased(
2069 ui,
2060 ui,
2070 repo,
2061 repo,
2071 destmap,
2062 destmap,
2072 state,
2063 state,
2073 skipped,
2064 skipped,
2074 collapsedas=None,
2065 collapsedas=None,
2075 keepf=False,
2066 keepf=False,
2076 fm=None,
2067 fm=None,
2077 backup=True,
2068 backup=True,
2078 ):
2069 ):
2079 """dispose of rebased revision at the end of the rebase
2070 """dispose of rebased revision at the end of the rebase
2080
2071
2081 If `collapsedas` is not None, the rebase was a collapse whose result if the
2072 If `collapsedas` is not None, the rebase was a collapse whose result if the
2082 `collapsedas` node.
2073 `collapsedas` node.
2083
2074
2084 If `keepf` is not True, the rebase has --keep set and no nodes should be
2075 If `keepf` is not True, the rebase has --keep set and no nodes should be
2085 removed (but bookmarks still need to be moved).
2076 removed (but bookmarks still need to be moved).
2086
2077
2087 If `backup` is False, no backup will be stored when stripping rebased
2078 If `backup` is False, no backup will be stored when stripping rebased
2088 revisions.
2079 revisions.
2089 """
2080 """
2090 tonode = repo.changelog.node
2081 tonode = repo.changelog.node
2091 replacements = {}
2082 replacements = {}
2092 moves = {}
2083 moves = {}
2093 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2084 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2094
2085
2095 collapsednodes = []
2086 collapsednodes = []
2096 for rev, newrev in sorted(state.items()):
2087 for rev, newrev in sorted(state.items()):
2097 if newrev >= 0 and newrev != rev:
2088 if newrev >= 0 and newrev != rev:
2098 oldnode = tonode(rev)
2089 oldnode = tonode(rev)
2099 newnode = collapsedas or tonode(newrev)
2090 newnode = collapsedas or tonode(newrev)
2100 moves[oldnode] = newnode
2091 moves[oldnode] = newnode
2101 succs = None
2092 succs = None
2102 if rev in skipped:
2093 if rev in skipped:
2103 if stripcleanup or not repo[rev].obsolete():
2094 if stripcleanup or not repo[rev].obsolete():
2104 succs = ()
2095 succs = ()
2105 elif collapsedas:
2096 elif collapsedas:
2106 collapsednodes.append(oldnode)
2097 collapsednodes.append(oldnode)
2107 else:
2098 else:
2108 succs = (newnode,)
2099 succs = (newnode,)
2109 if succs is not None:
2100 if succs is not None:
2110 replacements[(oldnode,)] = succs
2101 replacements[(oldnode,)] = succs
2111 if collapsednodes:
2102 if collapsednodes:
2112 replacements[tuple(collapsednodes)] = (collapsedas,)
2103 replacements[tuple(collapsednodes)] = (collapsedas,)
2113 if fm:
2104 if fm:
2114 hf = fm.hexfunc
2105 hf = fm.hexfunc
2115 fl = fm.formatlist
2106 fl = fm.formatlist
2116 fd = fm.formatdict
2107 fd = fm.formatdict
2117 changes = {}
2108 changes = {}
2118 for oldns, newn in replacements.items():
2109 for oldns, newn in replacements.items():
2119 for oldn in oldns:
2110 for oldn in oldns:
2120 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2111 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2121 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2112 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2122 fm.data(nodechanges=nodechanges)
2113 fm.data(nodechanges=nodechanges)
2123 if keepf:
2114 if keepf:
2124 replacements = {}
2115 replacements = {}
2125 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2116 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2126
2117
2127
2118
2128 def pullrebase(orig, ui, repo, *args, **opts):
2119 def pullrebase(orig, ui, repo, *args, **opts):
2129 """Call rebase after pull if the latter has been invoked with --rebase"""
2120 """Call rebase after pull if the latter has been invoked with --rebase"""
2130 if opts.get('rebase'):
2121 if opts.get('rebase'):
2131 if ui.configbool(b'commands', b'rebase.requiredest'):
2122 if ui.configbool(b'commands', b'rebase.requiredest'):
2132 msg = _(b'rebase destination required by configuration')
2123 msg = _(b'rebase destination required by configuration')
2133 hint = _(b'use hg pull followed by hg rebase -d DEST')
2124 hint = _(b'use hg pull followed by hg rebase -d DEST')
2134 raise error.InputError(msg, hint=hint)
2125 raise error.InputError(msg, hint=hint)
2135
2126
2136 with repo.wlock(), repo.lock():
2127 with repo.wlock(), repo.lock():
2137 if opts.get('update'):
2128 if opts.get('update'):
2138 del opts['update']
2129 del opts['update']
2139 ui.debug(
2130 ui.debug(
2140 b'--update and --rebase are not compatible, ignoring '
2131 b'--update and --rebase are not compatible, ignoring '
2141 b'the update flag\n'
2132 b'the update flag\n'
2142 )
2133 )
2143
2134
2144 cmdutil.checkunfinished(repo, skipmerge=True)
2135 cmdutil.checkunfinished(repo, skipmerge=True)
2145 cmdutil.bailifchanged(
2136 cmdutil.bailifchanged(
2146 repo,
2137 repo,
2147 hint=_(
2138 hint=_(
2148 b'cannot pull with rebase: '
2139 b'cannot pull with rebase: '
2149 b'please commit or shelve your changes first'
2140 b'please commit or shelve your changes first'
2150 ),
2141 ),
2151 )
2142 )
2152
2143
2153 revsprepull = len(repo)
2144 revsprepull = len(repo)
2154 origpostincoming = commands.postincoming
2145 origpostincoming = commands.postincoming
2155
2146
2156 def _dummy(*args, **kwargs):
2147 def _dummy(*args, **kwargs):
2157 pass
2148 pass
2158
2149
2159 commands.postincoming = _dummy
2150 commands.postincoming = _dummy
2160 try:
2151 try:
2161 ret = orig(ui, repo, *args, **opts)
2152 ret = orig(ui, repo, *args, **opts)
2162 finally:
2153 finally:
2163 commands.postincoming = origpostincoming
2154 commands.postincoming = origpostincoming
2164 revspostpull = len(repo)
2155 revspostpull = len(repo)
2165 if revspostpull > revsprepull:
2156 if revspostpull > revsprepull:
2166 # --rev option from pull conflict with rebase own --rev
2157 # --rev option from pull conflict with rebase own --rev
2167 # dropping it
2158 # dropping it
2168 if 'rev' in opts:
2159 if 'rev' in opts:
2169 del opts['rev']
2160 del opts['rev']
2170 # positional argument from pull conflicts with rebase's own
2161 # positional argument from pull conflicts with rebase's own
2171 # --source.
2162 # --source.
2172 if 'source' in opts:
2163 if 'source' in opts:
2173 del opts['source']
2164 del opts['source']
2174 # revsprepull is the len of the repo, not revnum of tip.
2165 # revsprepull is the len of the repo, not revnum of tip.
2175 destspace = list(repo.changelog.revs(start=revsprepull))
2166 destspace = list(repo.changelog.revs(start=revsprepull))
2176 opts['_destspace'] = destspace
2167 opts['_destspace'] = destspace
2177 try:
2168 try:
2178 rebase(ui, repo, **opts)
2169 rebase(ui, repo, **opts)
2179 except error.NoMergeDestAbort:
2170 except error.NoMergeDestAbort:
2180 # we can maybe update instead
2171 # we can maybe update instead
2181 rev, _a, _b = destutil.destupdate(repo)
2172 rev, _a, _b = destutil.destupdate(repo)
2182 if rev == repo[b'.'].rev():
2173 if rev == repo[b'.'].rev():
2183 ui.status(_(b'nothing to rebase\n'))
2174 ui.status(_(b'nothing to rebase\n'))
2184 else:
2175 else:
2185 ui.status(_(b'nothing to rebase - updating instead\n'))
2176 ui.status(_(b'nothing to rebase - updating instead\n'))
2186 # not passing argument to get the bare update behavior
2177 # not passing argument to get the bare update behavior
2187 # with warning and trumpets
2178 # with warning and trumpets
2188 commands.update(ui, repo)
2179 commands.update(ui, repo)
2189 else:
2180 else:
2190 if opts.get('tool'):
2181 if opts.get('tool'):
2191 raise error.InputError(_(b'--tool can only be used with --rebase'))
2182 raise error.InputError(_(b'--tool can only be used with --rebase'))
2192 ret = orig(ui, repo, *args, **opts)
2183 ret = orig(ui, repo, *args, **opts)
2193
2184
2194 return ret
2185 return ret
2195
2186
2196
2187
2197 def _compute_obsolete_sets(repo, rebaseobsrevs, destmap):
2188 def _compute_obsolete_sets(repo, rebaseobsrevs, destmap):
2198 """Figure out what to do about about obsolete revisions
2189 """Figure out what to do about about obsolete revisions
2199
2190
2200 `obsolete_with_successor_in_destination` is a mapping mapping obsolete => successor for all
2191 `obsolete_with_successor_in_destination` is a mapping mapping obsolete => successor for all
2201 obsolete nodes to be rebased given in `rebaseobsrevs`.
2192 obsolete nodes to be rebased given in `rebaseobsrevs`.
2202
2193
2203 `obsolete_with_successor_in_rebase_set` is a set with obsolete revisions,
2194 `obsolete_with_successor_in_rebase_set` is a set with obsolete revisions,
2204 without a successor in destination, that would cause divergence.
2195 without a successor in destination, that would cause divergence.
2205 """
2196 """
2206 obsolete_with_successor_in_destination = {}
2197 obsolete_with_successor_in_destination = {}
2207 obsolete_with_successor_in_rebase_set = set()
2198 obsolete_with_successor_in_rebase_set = set()
2208
2199
2209 cl = repo.changelog
2200 cl = repo.changelog
2210 get_rev = cl.index.get_rev
2201 get_rev = cl.index.get_rev
2211 extinctrevs = set(repo.revs(b'extinct()'))
2202 extinctrevs = set(repo.revs(b'extinct()'))
2212 for srcrev in rebaseobsrevs:
2203 for srcrev in rebaseobsrevs:
2213 srcnode = cl.node(srcrev)
2204 srcnode = cl.node(srcrev)
2214 # XXX: more advanced APIs are required to handle split correctly
2205 # XXX: more advanced APIs are required to handle split correctly
2215 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2206 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2216 # obsutil.allsuccessors includes node itself
2207 # obsutil.allsuccessors includes node itself
2217 successors.remove(srcnode)
2208 successors.remove(srcnode)
2218 succrevs = {get_rev(s) for s in successors}
2209 succrevs = {get_rev(s) for s in successors}
2219 succrevs.discard(None)
2210 succrevs.discard(None)
2220 if not successors or succrevs.issubset(extinctrevs):
2211 if not successors or succrevs.issubset(extinctrevs):
2221 # no successor, or all successors are extinct
2212 # no successor, or all successors are extinct
2222 obsolete_with_successor_in_destination[srcrev] = None
2213 obsolete_with_successor_in_destination[srcrev] = None
2223 else:
2214 else:
2224 dstrev = destmap[srcrev]
2215 dstrev = destmap[srcrev]
2225 for succrev in succrevs:
2216 for succrev in succrevs:
2226 if cl.isancestorrev(succrev, dstrev):
2217 if cl.isancestorrev(succrev, dstrev):
2227 obsolete_with_successor_in_destination[srcrev] = succrev
2218 obsolete_with_successor_in_destination[srcrev] = succrev
2228 break
2219 break
2229 else:
2220 else:
2230 # If 'srcrev' has a successor in rebase set but none in
2221 # If 'srcrev' has a successor in rebase set but none in
2231 # destination (which would be catched above), we shall skip it
2222 # destination (which would be catched above), we shall skip it
2232 # and its descendants to avoid divergence.
2223 # and its descendants to avoid divergence.
2233 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2224 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2234 obsolete_with_successor_in_rebase_set.add(srcrev)
2225 obsolete_with_successor_in_rebase_set.add(srcrev)
2235
2226
2236 return (
2227 return (
2237 obsolete_with_successor_in_destination,
2228 obsolete_with_successor_in_destination,
2238 obsolete_with_successor_in_rebase_set,
2229 obsolete_with_successor_in_rebase_set,
2239 )
2230 )
2240
2231
2241
2232
2242 def abortrebase(ui, repo):
2233 def abortrebase(ui, repo):
2243 with repo.wlock(), repo.lock():
2234 with repo.wlock(), repo.lock():
2244 rbsrt = rebaseruntime(repo, ui)
2235 rbsrt = rebaseruntime(repo, ui)
2245 rbsrt._prepareabortorcontinue(isabort=True)
2236 rbsrt._prepareabortorcontinue(isabort=True)
2246
2237
2247
2238
2248 def continuerebase(ui, repo):
2239 def continuerebase(ui, repo):
2249 with repo.wlock(), repo.lock():
2240 with repo.wlock(), repo.lock():
2250 rbsrt = rebaseruntime(repo, ui)
2241 rbsrt = rebaseruntime(repo, ui)
2251 ms = mergestatemod.mergestate.read(repo)
2242 ms = mergestatemod.mergestate.read(repo)
2252 mergeutil.checkunresolved(ms)
2243 mergeutil.checkunresolved(ms)
2253 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2244 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2254 if retcode is not None:
2245 if retcode is not None:
2255 return retcode
2246 return retcode
2256 rbsrt._performrebase(None)
2247 rbsrt._performrebase(None)
2257 rbsrt._finishrebase()
2248 rbsrt._finishrebase()
2258
2249
2259
2250
2260 def summaryhook(ui, repo):
2251 def summaryhook(ui, repo):
2261 if not repo.vfs.exists(b'rebasestate'):
2252 if not repo.vfs.exists(b'rebasestate'):
2262 return
2253 return
2263 try:
2254 try:
2264 rbsrt = rebaseruntime(repo, ui, {})
2255 rbsrt = rebaseruntime(repo, ui, {})
2265 rbsrt.restorestatus()
2256 rbsrt.restorestatus()
2266 state = rbsrt.state
2257 state = rbsrt.state
2267 except error.RepoLookupError:
2258 except error.RepoLookupError:
2268 # i18n: column positioning for "hg summary"
2259 # i18n: column positioning for "hg summary"
2269 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2260 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2270 ui.write(msg)
2261 ui.write(msg)
2271 return
2262 return
2272 numrebased = len([i for i in state.values() if i >= 0])
2263 numrebased = len([i for i in state.values() if i >= 0])
2273 # i18n: column positioning for "hg summary"
2264 # i18n: column positioning for "hg summary"
2274 ui.write(
2265 ui.write(
2275 _(b'rebase: %s, %s (rebase --continue)\n')
2266 _(b'rebase: %s, %s (rebase --continue)\n')
2276 % (
2267 % (
2277 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2268 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2278 ui.label(_(b'%d remaining'), b'rebase.remaining')
2269 ui.label(_(b'%d remaining'), b'rebase.remaining')
2279 % (len(state) - numrebased),
2270 % (len(state) - numrebased),
2280 )
2271 )
2281 )
2272 )
2282
2273
2283
2274
2284 def uisetup(ui):
2275 def uisetup(ui):
2285 # Replace pull with a decorator to provide --rebase option
2276 # Replace pull with a decorator to provide --rebase option
2286 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2277 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2287 entry[1].append(
2278 entry[1].append(
2288 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2279 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2289 )
2280 )
2290 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2281 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2291 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2282 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2292 statemod.addunfinished(
2283 statemod.addunfinished(
2293 b'rebase',
2284 b'rebase',
2294 fname=b'rebasestate',
2285 fname=b'rebasestate',
2295 stopflag=True,
2286 stopflag=True,
2296 continueflag=True,
2287 continueflag=True,
2297 abortfunc=abortrebase,
2288 abortfunc=abortrebase,
2298 continuefunc=continuerebase,
2289 continuefunc=continuerebase,
2299 )
2290 )
@@ -1,1803 +1,1804 b''
1 #
1 #
2 # This is the mercurial setup script.
2 # This is the mercurial setup script.
3 #
3 #
4 # 'python setup.py install', or
4 # 'python setup.py install', or
5 # 'python setup.py --help' for more options
5 # 'python setup.py --help' for more options
6 import os
6 import os
7
7
8 # Mercurial can't work on 3.6.0 or 3.6.1 due to a bug in % formatting
8 # Mercurial can't work on 3.6.0 or 3.6.1 due to a bug in % formatting
9 # in bytestrings.
9 # in bytestrings.
10 supportedpy = ','.join(
10 supportedpy = ','.join(
11 [
11 [
12 '>=3.6.2',
12 '>=3.6.2',
13 ]
13 ]
14 )
14 )
15
15
16 import sys, platform
16 import sys, platform
17 import sysconfig
17 import sysconfig
18
18
19
19
20 def sysstr(s):
20 def sysstr(s):
21 return s.decode('latin-1')
21 return s.decode('latin-1')
22
22
23
23
24 def eprint(*args, **kwargs):
24 def eprint(*args, **kwargs):
25 kwargs['file'] = sys.stderr
25 kwargs['file'] = sys.stderr
26 print(*args, **kwargs)
26 print(*args, **kwargs)
27
27
28
28
29 import ssl
29 import ssl
30
30
31 # ssl.HAS_TLSv1* are preferred to check support but they were added in Python
31 # ssl.HAS_TLSv1* are preferred to check support but they were added in Python
32 # 3.7. Prior to CPython commit 6e8cda91d92da72800d891b2fc2073ecbc134d98
32 # 3.7. Prior to CPython commit 6e8cda91d92da72800d891b2fc2073ecbc134d98
33 # (backported to the 3.7 branch), ssl.PROTOCOL_TLSv1_1 / ssl.PROTOCOL_TLSv1_2
33 # (backported to the 3.7 branch), ssl.PROTOCOL_TLSv1_1 / ssl.PROTOCOL_TLSv1_2
34 # were defined only if compiled against a OpenSSL version with TLS 1.1 / 1.2
34 # were defined only if compiled against a OpenSSL version with TLS 1.1 / 1.2
35 # support. At the mentioned commit, they were unconditionally defined.
35 # support. At the mentioned commit, they were unconditionally defined.
36 _notset = object()
36 _notset = object()
37 has_tlsv1_1 = getattr(ssl, 'HAS_TLSv1_1', _notset)
37 has_tlsv1_1 = getattr(ssl, 'HAS_TLSv1_1', _notset)
38 if has_tlsv1_1 is _notset:
38 if has_tlsv1_1 is _notset:
39 has_tlsv1_1 = getattr(ssl, 'PROTOCOL_TLSv1_1', _notset) is not _notset
39 has_tlsv1_1 = getattr(ssl, 'PROTOCOL_TLSv1_1', _notset) is not _notset
40 has_tlsv1_2 = getattr(ssl, 'HAS_TLSv1_2', _notset)
40 has_tlsv1_2 = getattr(ssl, 'HAS_TLSv1_2', _notset)
41 if has_tlsv1_2 is _notset:
41 if has_tlsv1_2 is _notset:
42 has_tlsv1_2 = getattr(ssl, 'PROTOCOL_TLSv1_2', _notset) is not _notset
42 has_tlsv1_2 = getattr(ssl, 'PROTOCOL_TLSv1_2', _notset) is not _notset
43 if not (has_tlsv1_1 or has_tlsv1_2):
43 if not (has_tlsv1_1 or has_tlsv1_2):
44 error = """
44 error = """
45 The `ssl` module does not advertise support for TLS 1.1 or TLS 1.2.
45 The `ssl` module does not advertise support for TLS 1.1 or TLS 1.2.
46 Please make sure that your Python installation was compiled against an OpenSSL
46 Please make sure that your Python installation was compiled against an OpenSSL
47 version enabling these features (likely this requires the OpenSSL version to
47 version enabling these features (likely this requires the OpenSSL version to
48 be at least 1.0.1).
48 be at least 1.0.1).
49 """
49 """
50 print(error, file=sys.stderr)
50 print(error, file=sys.stderr)
51 sys.exit(1)
51 sys.exit(1)
52
52
53 DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
53 DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
54
54
55 # Solaris Python packaging brain damage
55 # Solaris Python packaging brain damage
56 try:
56 try:
57 import hashlib
57 import hashlib
58
58
59 sha = hashlib.sha1()
59 sha = hashlib.sha1()
60 except ImportError:
60 except ImportError:
61 try:
61 try:
62 import sha
62 import sha
63
63
64 sha.sha # silence unused import warning
64 sha.sha # silence unused import warning
65 except ImportError:
65 except ImportError:
66 raise SystemExit(
66 raise SystemExit(
67 "Couldn't import standard hashlib (incomplete Python install)."
67 "Couldn't import standard hashlib (incomplete Python install)."
68 )
68 )
69
69
70 try:
70 try:
71 import zlib
71 import zlib
72
72
73 zlib.compressobj # silence unused import warning
73 zlib.compressobj # silence unused import warning
74 except ImportError:
74 except ImportError:
75 raise SystemExit(
75 raise SystemExit(
76 "Couldn't import standard zlib (incomplete Python install)."
76 "Couldn't import standard zlib (incomplete Python install)."
77 )
77 )
78
78
79 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
79 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
80 isironpython = False
80 isironpython = False
81 try:
81 try:
82 isironpython = (
82 isironpython = (
83 platform.python_implementation().lower().find("ironpython") != -1
83 platform.python_implementation().lower().find("ironpython") != -1
84 )
84 )
85 except AttributeError:
85 except AttributeError:
86 pass
86 pass
87
87
88 if isironpython:
88 if isironpython:
89 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
89 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
90 else:
90 else:
91 try:
91 try:
92 import bz2
92 import bz2
93
93
94 bz2.BZ2Compressor # silence unused import warning
94 bz2.BZ2Compressor # silence unused import warning
95 except ImportError:
95 except ImportError:
96 raise SystemExit(
96 raise SystemExit(
97 "Couldn't import standard bz2 (incomplete Python install)."
97 "Couldn't import standard bz2 (incomplete Python install)."
98 )
98 )
99
99
100 ispypy = "PyPy" in sys.version
100 ispypy = "PyPy" in sys.version
101
101
102 import ctypes
102 import ctypes
103 import stat, subprocess, time
103 import stat, subprocess, time
104 import re
104 import re
105 import shutil
105 import shutil
106 import tempfile
106 import tempfile
107
107
108 # We have issues with setuptools on some platforms and builders. Until
108 # We have issues with setuptools on some platforms and builders. Until
109 # those are resolved, setuptools is opt-in except for platforms where
109 # those are resolved, setuptools is opt-in except for platforms where
110 # we don't have issues.
110 # we don't have issues.
111 issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ
111 issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ
112 if issetuptools:
112 if issetuptools:
113 from setuptools import setup
113 from setuptools import setup
114 else:
114 else:
115 from distutils.core import setup
115 from distutils.core import setup
116 from distutils.ccompiler import new_compiler
116 from distutils.ccompiler import new_compiler
117 from distutils.core import Command, Extension
117 from distutils.core import Command, Extension
118 from distutils.dist import Distribution
118 from distutils.dist import Distribution
119 from distutils.command.build import build
119 from distutils.command.build import build
120 from distutils.command.build_ext import build_ext
120 from distutils.command.build_ext import build_ext
121 from distutils.command.build_py import build_py
121 from distutils.command.build_py import build_py
122 from distutils.command.build_scripts import build_scripts
122 from distutils.command.build_scripts import build_scripts
123 from distutils.command.install import install
123 from distutils.command.install import install
124 from distutils.command.install_lib import install_lib
124 from distutils.command.install_lib import install_lib
125 from distutils.command.install_scripts import install_scripts
125 from distutils.command.install_scripts import install_scripts
126 from distutils import log
126 from distutils import log
127 from distutils.spawn import spawn, find_executable
127 from distutils.spawn import spawn, find_executable
128 from distutils import file_util
128 from distutils import file_util
129 from distutils.errors import (
129 from distutils.errors import (
130 CCompilerError,
130 CCompilerError,
131 DistutilsError,
131 DistutilsError,
132 DistutilsExecError,
132 DistutilsExecError,
133 )
133 )
134 from distutils.sysconfig import get_python_inc
134 from distutils.sysconfig import get_python_inc
135
135
136
136
137 def write_if_changed(path, content):
137 def write_if_changed(path, content):
138 """Write content to a file iff the content hasn't changed."""
138 """Write content to a file iff the content hasn't changed."""
139 if os.path.exists(path):
139 if os.path.exists(path):
140 with open(path, 'rb') as fh:
140 with open(path, 'rb') as fh:
141 current = fh.read()
141 current = fh.read()
142 else:
142 else:
143 current = b''
143 current = b''
144
144
145 if current != content:
145 if current != content:
146 with open(path, 'wb') as fh:
146 with open(path, 'wb') as fh:
147 fh.write(content)
147 fh.write(content)
148
148
149
149
150 scripts = ['hg']
150 scripts = ['hg']
151 if os.name == 'nt':
151 if os.name == 'nt':
152 # We remove hg.bat if we are able to build hg.exe.
152 # We remove hg.bat if we are able to build hg.exe.
153 scripts.append('contrib/win32/hg.bat')
153 scripts.append('contrib/win32/hg.bat')
154
154
155
155
156 def cancompile(cc, code):
156 def cancompile(cc, code):
157 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
157 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
158 devnull = oldstderr = None
158 devnull = oldstderr = None
159 try:
159 try:
160 fname = os.path.join(tmpdir, 'testcomp.c')
160 fname = os.path.join(tmpdir, 'testcomp.c')
161 f = open(fname, 'w')
161 f = open(fname, 'w')
162 f.write(code)
162 f.write(code)
163 f.close()
163 f.close()
164 # Redirect stderr to /dev/null to hide any error messages
164 # Redirect stderr to /dev/null to hide any error messages
165 # from the compiler.
165 # from the compiler.
166 # This will have to be changed if we ever have to check
166 # This will have to be changed if we ever have to check
167 # for a function on Windows.
167 # for a function on Windows.
168 devnull = open('/dev/null', 'w')
168 devnull = open('/dev/null', 'w')
169 oldstderr = os.dup(sys.stderr.fileno())
169 oldstderr = os.dup(sys.stderr.fileno())
170 os.dup2(devnull.fileno(), sys.stderr.fileno())
170 os.dup2(devnull.fileno(), sys.stderr.fileno())
171 objects = cc.compile([fname], output_dir=tmpdir)
171 objects = cc.compile([fname], output_dir=tmpdir)
172 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
172 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
173 return True
173 return True
174 except Exception:
174 except Exception:
175 return False
175 return False
176 finally:
176 finally:
177 if oldstderr is not None:
177 if oldstderr is not None:
178 os.dup2(oldstderr, sys.stderr.fileno())
178 os.dup2(oldstderr, sys.stderr.fileno())
179 if devnull is not None:
179 if devnull is not None:
180 devnull.close()
180 devnull.close()
181 shutil.rmtree(tmpdir)
181 shutil.rmtree(tmpdir)
182
182
183
183
184 # simplified version of distutils.ccompiler.CCompiler.has_function
184 # simplified version of distutils.ccompiler.CCompiler.has_function
185 # that actually removes its temporary files.
185 # that actually removes its temporary files.
186 def hasfunction(cc, funcname):
186 def hasfunction(cc, funcname):
187 code = 'int main(void) { %s(); }\n' % funcname
187 code = 'int main(void) { %s(); }\n' % funcname
188 return cancompile(cc, code)
188 return cancompile(cc, code)
189
189
190
190
191 def hasheader(cc, headername):
191 def hasheader(cc, headername):
192 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
192 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
193 return cancompile(cc, code)
193 return cancompile(cc, code)
194
194
195
195
196 # py2exe needs to be installed to work
196 # py2exe needs to be installed to work
197 try:
197 try:
198 import py2exe
198 import py2exe
199
199
200 py2exe.patch_distutils()
200 py2exe.patch_distutils()
201 py2exeloaded = True
201 py2exeloaded = True
202 # import py2exe's patched Distribution class
202 # import py2exe's patched Distribution class
203 from distutils.core import Distribution
203 from distutils.core import Distribution
204 except ImportError:
204 except ImportError:
205 py2exeloaded = False
205 py2exeloaded = False
206
206
207
207
208 def runcmd(cmd, env, cwd=None):
208 def runcmd(cmd, env, cwd=None):
209 p = subprocess.Popen(
209 p = subprocess.Popen(
210 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd
210 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd
211 )
211 )
212 out, err = p.communicate()
212 out, err = p.communicate()
213 return p.returncode, out, err
213 return p.returncode, out, err
214
214
215
215
216 class hgcommand:
216 class hgcommand:
217 def __init__(self, cmd, env):
217 def __init__(self, cmd, env):
218 self.cmd = cmd
218 self.cmd = cmd
219 self.env = env
219 self.env = env
220
220
221 def run(self, args):
221 def run(self, args):
222 cmd = self.cmd + args
222 cmd = self.cmd + args
223 returncode, out, err = runcmd(cmd, self.env)
223 returncode, out, err = runcmd(cmd, self.env)
224 err = filterhgerr(err)
224 err = filterhgerr(err)
225 if err:
225 if err:
226 print("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
226 print("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
227 print(err, file=sys.stderr)
227 print(err, file=sys.stderr)
228 if returncode != 0:
228 if returncode != 0:
229 return b''
229 return b''
230 return out
230 return out
231
231
232
232
233 def filterhgerr(err):
233 def filterhgerr(err):
234 # If root is executing setup.py, but the repository is owned by
234 # If root is executing setup.py, but the repository is owned by
235 # another user (as in "sudo python setup.py install") we will get
235 # another user (as in "sudo python setup.py install") we will get
236 # trust warnings since the .hg/hgrc file is untrusted. That is
236 # trust warnings since the .hg/hgrc file is untrusted. That is
237 # fine, we don't want to load it anyway. Python may warn about
237 # fine, we don't want to load it anyway. Python may warn about
238 # a missing __init__.py in mercurial/locale, we also ignore that.
238 # a missing __init__.py in mercurial/locale, we also ignore that.
239 err = [
239 err = [
240 e
240 e
241 for e in err.splitlines()
241 for e in err.splitlines()
242 if (
242 if (
243 not e.startswith(b'not trusting file')
243 not e.startswith(b'not trusting file')
244 and not e.startswith(b'warning: Not importing')
244 and not e.startswith(b'warning: Not importing')
245 and not e.startswith(b'obsolete feature not enabled')
245 and not e.startswith(b'obsolete feature not enabled')
246 and not e.startswith(b'*** failed to import extension')
246 and not e.startswith(b'*** failed to import extension')
247 and not e.startswith(b'devel-warn:')
247 and not e.startswith(b'devel-warn:')
248 and not (
248 and not (
249 e.startswith(b'(third party extension')
249 e.startswith(b'(third party extension')
250 and e.endswith(b'or newer of Mercurial; disabling)')
250 and e.endswith(b'or newer of Mercurial; disabling)')
251 )
251 )
252 )
252 )
253 ]
253 ]
254 return b'\n'.join(b' ' + e for e in err)
254 return b'\n'.join(b' ' + e for e in err)
255
255
256
256
257 def findhg():
257 def findhg():
258 """Try to figure out how we should invoke hg for examining the local
258 """Try to figure out how we should invoke hg for examining the local
259 repository contents.
259 repository contents.
260
260
261 Returns an hgcommand object."""
261 Returns an hgcommand object."""
262 # By default, prefer the "hg" command in the user's path. This was
262 # By default, prefer the "hg" command in the user's path. This was
263 # presumably the hg command that the user used to create this repository.
263 # presumably the hg command that the user used to create this repository.
264 #
264 #
265 # This repository may require extensions or other settings that would not
265 # This repository may require extensions or other settings that would not
266 # be enabled by running the hg script directly from this local repository.
266 # be enabled by running the hg script directly from this local repository.
267 hgenv = os.environ.copy()
267 hgenv = os.environ.copy()
268 # Use HGPLAIN to disable hgrc settings that would change output formatting,
268 # Use HGPLAIN to disable hgrc settings that would change output formatting,
269 # and disable localization for the same reasons.
269 # and disable localization for the same reasons.
270 hgenv['HGPLAIN'] = '1'
270 hgenv['HGPLAIN'] = '1'
271 hgenv['LANGUAGE'] = 'C'
271 hgenv['LANGUAGE'] = 'C'
272 hgcmd = ['hg']
272 hgcmd = ['hg']
273 # Run a simple "hg log" command just to see if using hg from the user's
273 # Run a simple "hg log" command just to see if using hg from the user's
274 # path works and can successfully interact with this repository. Windows
274 # path works and can successfully interact with this repository. Windows
275 # gives precedence to hg.exe in the current directory, so fall back to the
275 # gives precedence to hg.exe in the current directory, so fall back to the
276 # python invocation of local hg, where pythonXY.dll can always be found.
276 # python invocation of local hg, where pythonXY.dll can always be found.
277 check_cmd = ['log', '-r.', '-Ttest']
277 check_cmd = ['log', '-r.', '-Ttest']
278 if os.name != 'nt' or not os.path.exists("hg.exe"):
278 if os.name != 'nt' or not os.path.exists("hg.exe"):
279 try:
279 try:
280 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
280 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
281 except EnvironmentError:
281 except EnvironmentError:
282 retcode = -1
282 retcode = -1
283 if retcode == 0 and not filterhgerr(err):
283 if retcode == 0 and not filterhgerr(err):
284 return hgcommand(hgcmd, hgenv)
284 return hgcommand(hgcmd, hgenv)
285
285
286 # Fall back to trying the local hg installation.
286 # Fall back to trying the local hg installation.
287 hgenv = localhgenv()
287 hgenv = localhgenv()
288 hgcmd = [sys.executable, 'hg']
288 hgcmd = [sys.executable, 'hg']
289 try:
289 try:
290 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
290 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
291 except EnvironmentError:
291 except EnvironmentError:
292 retcode = -1
292 retcode = -1
293 if retcode == 0 and not filterhgerr(err):
293 if retcode == 0 and not filterhgerr(err):
294 return hgcommand(hgcmd, hgenv)
294 return hgcommand(hgcmd, hgenv)
295
295
296 eprint("/!\\")
296 eprint("/!\\")
297 eprint(r"/!\ Unable to find a working hg binary")
297 eprint(r"/!\ Unable to find a working hg binary")
298 eprint(r"/!\ Version cannot be extract from the repository")
298 eprint(r"/!\ Version cannot be extract from the repository")
299 eprint(r"/!\ Re-run the setup once a first version is built")
299 eprint(r"/!\ Re-run the setup once a first version is built")
300 return None
300 return None
301
301
302
302
303 def localhgenv():
303 def localhgenv():
304 """Get an environment dictionary to use for invoking or importing
304 """Get an environment dictionary to use for invoking or importing
305 mercurial from the local repository."""
305 mercurial from the local repository."""
306 # Execute hg out of this directory with a custom environment which takes
306 # Execute hg out of this directory with a custom environment which takes
307 # care to not use any hgrc files and do no localization.
307 # care to not use any hgrc files and do no localization.
308 env = {
308 env = {
309 'HGMODULEPOLICY': 'py',
309 'HGMODULEPOLICY': 'py',
310 'HGRCPATH': '',
310 'HGRCPATH': '',
311 'LANGUAGE': 'C',
311 'LANGUAGE': 'C',
312 'PATH': '',
312 'PATH': '',
313 } # make pypi modules that use os.environ['PATH'] happy
313 } # make pypi modules that use os.environ['PATH'] happy
314 if 'LD_LIBRARY_PATH' in os.environ:
314 if 'LD_LIBRARY_PATH' in os.environ:
315 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
315 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
316 if 'SystemRoot' in os.environ:
316 if 'SystemRoot' in os.environ:
317 # SystemRoot is required by Windows to load various DLLs. See:
317 # SystemRoot is required by Windows to load various DLLs. See:
318 # https://bugs.python.org/issue13524#msg148850
318 # https://bugs.python.org/issue13524#msg148850
319 env['SystemRoot'] = os.environ['SystemRoot']
319 env['SystemRoot'] = os.environ['SystemRoot']
320 return env
320 return env
321
321
322
322
323 version = ''
323 version = ''
324
324
325
325
326 def _try_get_version():
326 def _try_get_version():
327 hg = findhg()
327 hg = findhg()
328 if hg is None:
328 if hg is None:
329 return ''
329 return ''
330 hgid = None
330 hgid = None
331 numerictags = []
331 numerictags = []
332 cmd = ['log', '-r', '.', '--template', '{tags}\n']
332 cmd = ['log', '-r', '.', '--template', '{tags}\n']
333 pieces = sysstr(hg.run(cmd)).split()
333 pieces = sysstr(hg.run(cmd)).split()
334 numerictags = [t for t in pieces if t[0:1].isdigit()]
334 numerictags = [t for t in pieces if t[0:1].isdigit()]
335 hgid = sysstr(hg.run(['id', '-i'])).strip()
335 hgid = sysstr(hg.run(['id', '-i'])).strip()
336 if hgid.count('+') == 2:
336 if hgid.count('+') == 2:
337 hgid = hgid.replace("+", ".", 1)
337 hgid = hgid.replace("+", ".", 1)
338 if not hgid:
338 if not hgid:
339 eprint("/!\\")
339 eprint("/!\\")
340 eprint(r"/!\ Unable to determine hg version from local repository")
340 eprint(r"/!\ Unable to determine hg version from local repository")
341 eprint(r"/!\ Failed to retrieve current revision tags")
341 eprint(r"/!\ Failed to retrieve current revision tags")
342 return ''
342 return ''
343 if numerictags: # tag(s) found
343 if numerictags: # tag(s) found
344 version = numerictags[-1]
344 version = numerictags[-1]
345 if hgid.endswith('+'): # propagate the dirty status to the tag
345 if hgid.endswith('+'): # propagate the dirty status to the tag
346 version += '+'
346 version += '+'
347 else: # no tag found on the checked out revision
347 else: # no tag found on the checked out revision
348 ltagcmd = ['log', '--rev', 'wdir()', '--template', '{latesttag}']
348 ltagcmd = ['log', '--rev', 'wdir()', '--template', '{latesttag}']
349 ltag = sysstr(hg.run(ltagcmd))
349 ltag = sysstr(hg.run(ltagcmd))
350 if not ltag:
350 if not ltag:
351 eprint("/!\\")
351 eprint("/!\\")
352 eprint(r"/!\ Unable to determine hg version from local repository")
352 eprint(r"/!\ Unable to determine hg version from local repository")
353 eprint(
353 eprint(
354 r"/!\ Failed to retrieve current revision distance to lated tag"
354 r"/!\ Failed to retrieve current revision distance to lated tag"
355 )
355 )
356 return ''
356 return ''
357 changessincecmd = [
357 changessincecmd = [
358 'log',
358 'log',
359 '-T',
359 '-T',
360 'x\n',
360 'x\n',
361 '-r',
361 '-r',
362 "only(parents(),'%s')" % ltag,
362 "only(parents(),'%s')" % ltag,
363 ]
363 ]
364 changessince = len(hg.run(changessincecmd).splitlines())
364 changessince = len(hg.run(changessincecmd).splitlines())
365 version = '%s+hg%s.%s' % (ltag, changessince, hgid)
365 version = '%s+hg%s.%s' % (ltag, changessince, hgid)
366 if version.endswith('+'):
366 if version.endswith('+'):
367 version = version[:-1] + 'local' + time.strftime('%Y%m%d')
367 version = version[:-1] + 'local' + time.strftime('%Y%m%d')
368 return version
368 return version
369
369
370
370
371 if os.path.isdir('.hg'):
371 if os.path.isdir('.hg'):
372 version = _try_get_version()
372 version = _try_get_version()
373 elif os.path.exists('.hg_archival.txt'):
373 elif os.path.exists('.hg_archival.txt'):
374 kw = dict(
374 kw = dict(
375 [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')]
375 [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')]
376 )
376 )
377 if 'tag' in kw:
377 if 'tag' in kw:
378 version = kw['tag']
378 version = kw['tag']
379 elif 'latesttag' in kw:
379 elif 'latesttag' in kw:
380 if 'changessincelatesttag' in kw:
380 if 'changessincelatesttag' in kw:
381 version = (
381 version = (
382 '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw
382 '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw
383 )
383 )
384 else:
384 else:
385 version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw
385 version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw
386 else:
386 else:
387 version = '0+hg' + kw.get('node', '')[:12]
387 version = '0+hg' + kw.get('node', '')[:12]
388 elif os.path.exists('mercurial/__version__.py'):
388 elif os.path.exists('mercurial/__version__.py'):
389 with open('mercurial/__version__.py') as f:
389 with open('mercurial/__version__.py') as f:
390 data = f.read()
390 data = f.read()
391 version = re.search('version = b"(.*)"', data).group(1)
391 version = re.search('version = b"(.*)"', data).group(1)
392 if not version:
392 if not version:
393 if os.environ.get("MERCURIAL_SETUP_MAKE_LOCAL") == "1":
393 if os.environ.get("MERCURIAL_SETUP_MAKE_LOCAL") == "1":
394 version = "0.0+0"
394 version = "0.0+0"
395 eprint("/!\\")
395 eprint("/!\\")
396 eprint(r"/!\ Using '0.0+0' as the default version")
396 eprint(r"/!\ Using '0.0+0' as the default version")
397 eprint(r"/!\ Re-run make local once that first version is built")
397 eprint(r"/!\ Re-run make local once that first version is built")
398 eprint("/!\\")
398 eprint("/!\\")
399 else:
399 else:
400 eprint("/!\\")
400 eprint("/!\\")
401 eprint(r"/!\ Could not determine the Mercurial version")
401 eprint(r"/!\ Could not determine the Mercurial version")
402 eprint(r"/!\ You need to build a local version first")
402 eprint(r"/!\ You need to build a local version first")
403 eprint(r"/!\ Run `make local` and try again")
403 eprint(r"/!\ Run `make local` and try again")
404 eprint("/!\\")
404 eprint("/!\\")
405 msg = "Run `make local` first to get a working local version"
405 msg = "Run `make local` first to get a working local version"
406 raise SystemExit(msg)
406 raise SystemExit(msg)
407
407
408 versionb = version
408 versionb = version
409 if not isinstance(versionb, bytes):
409 if not isinstance(versionb, bytes):
410 versionb = versionb.encode('ascii')
410 versionb = versionb.encode('ascii')
411
411
412 write_if_changed(
412 write_if_changed(
413 'mercurial/__version__.py',
413 'mercurial/__version__.py',
414 b''.join(
414 b''.join(
415 [
415 [
416 b'# this file is autogenerated by setup.py\n'
416 b'# this file is autogenerated by setup.py\n'
417 b'version = b"%s"\n' % versionb,
417 b'version = b"%s"\n' % versionb,
418 ]
418 ]
419 ),
419 ),
420 )
420 )
421
421
422
422
423 class hgbuild(build):
423 class hgbuild(build):
424 # Insert hgbuildmo first so that files in mercurial/locale/ are found
424 # Insert hgbuildmo first so that files in mercurial/locale/ are found
425 # when build_py is run next.
425 # when build_py is run next.
426 sub_commands = [('build_mo', None)] + build.sub_commands
426 sub_commands = [('build_mo', None)] + build.sub_commands
427
427
428
428
429 class hgbuildmo(build):
429 class hgbuildmo(build):
430
430
431 description = "build translations (.mo files)"
431 description = "build translations (.mo files)"
432
432
433 def run(self):
433 def run(self):
434 if not find_executable('msgfmt'):
434 if not find_executable('msgfmt'):
435 self.warn(
435 self.warn(
436 "could not find msgfmt executable, no translations "
436 "could not find msgfmt executable, no translations "
437 "will be built"
437 "will be built"
438 )
438 )
439 return
439 return
440
440
441 podir = 'i18n'
441 podir = 'i18n'
442 if not os.path.isdir(podir):
442 if not os.path.isdir(podir):
443 self.warn("could not find %s/ directory" % podir)
443 self.warn("could not find %s/ directory" % podir)
444 return
444 return
445
445
446 join = os.path.join
446 join = os.path.join
447 for po in os.listdir(podir):
447 for po in os.listdir(podir):
448 if not po.endswith('.po'):
448 if not po.endswith('.po'):
449 continue
449 continue
450 pofile = join(podir, po)
450 pofile = join(podir, po)
451 modir = join('locale', po[:-3], 'LC_MESSAGES')
451 modir = join('locale', po[:-3], 'LC_MESSAGES')
452 mofile = join(modir, 'hg.mo')
452 mofile = join(modir, 'hg.mo')
453 mobuildfile = join('mercurial', mofile)
453 mobuildfile = join('mercurial', mofile)
454 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
454 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
455 if sys.platform != 'sunos5':
455 if sys.platform != 'sunos5':
456 # msgfmt on Solaris does not know about -c
456 # msgfmt on Solaris does not know about -c
457 cmd.append('-c')
457 cmd.append('-c')
458 self.mkpath(join('mercurial', modir))
458 self.mkpath(join('mercurial', modir))
459 self.make_file([pofile], mobuildfile, spawn, (cmd,))
459 self.make_file([pofile], mobuildfile, spawn, (cmd,))
460
460
461
461
462 class hgdist(Distribution):
462 class hgdist(Distribution):
463 pure = False
463 pure = False
464 rust = False
464 rust = False
465 no_rust = False
465 no_rust = False
466 cffi = ispypy
466 cffi = ispypy
467
467
468 global_options = Distribution.global_options + [
468 global_options = Distribution.global_options + [
469 ('pure', None, "use pure (slow) Python code instead of C extensions"),
469 ('pure', None, "use pure (slow) Python code instead of C extensions"),
470 ('rust', None, "use Rust extensions additionally to C extensions"),
470 ('rust', None, "use Rust extensions additionally to C extensions"),
471 (
471 (
472 'no-rust',
472 'no-rust',
473 None,
473 None,
474 "do not use Rust extensions additionally to C extensions",
474 "do not use Rust extensions additionally to C extensions",
475 ),
475 ),
476 ]
476 ]
477
477
478 negative_opt = Distribution.negative_opt.copy()
478 negative_opt = Distribution.negative_opt.copy()
479 boolean_options = ['pure', 'rust', 'no-rust']
479 boolean_options = ['pure', 'rust', 'no-rust']
480 negative_opt['no-rust'] = 'rust'
480 negative_opt['no-rust'] = 'rust'
481
481
482 def _set_command_options(self, command_obj, option_dict=None):
482 def _set_command_options(self, command_obj, option_dict=None):
483 # Not all distutils versions in the wild have boolean_options.
483 # Not all distutils versions in the wild have boolean_options.
484 # This should be cleaned up when we're Python 3 only.
484 # This should be cleaned up when we're Python 3 only.
485 command_obj.boolean_options = (
485 command_obj.boolean_options = (
486 getattr(command_obj, 'boolean_options', []) + self.boolean_options
486 getattr(command_obj, 'boolean_options', []) + self.boolean_options
487 )
487 )
488 return Distribution._set_command_options(
488 return Distribution._set_command_options(
489 self, command_obj, option_dict=option_dict
489 self, command_obj, option_dict=option_dict
490 )
490 )
491
491
492 def parse_command_line(self):
492 def parse_command_line(self):
493 ret = Distribution.parse_command_line(self)
493 ret = Distribution.parse_command_line(self)
494 if not (self.rust or self.no_rust):
494 if not (self.rust or self.no_rust):
495 hgrustext = os.environ.get('HGWITHRUSTEXT')
495 hgrustext = os.environ.get('HGWITHRUSTEXT')
496 # TODO record it for proper rebuild upon changes
496 # TODO record it for proper rebuild upon changes
497 # (see mercurial/__modulepolicy__.py)
497 # (see mercurial/__modulepolicy__.py)
498 if hgrustext != 'cpython' and hgrustext is not None:
498 if hgrustext != 'cpython' and hgrustext is not None:
499 if hgrustext:
499 if hgrustext:
500 msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
500 msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
501 print(msg, file=sys.stderr)
501 print(msg, file=sys.stderr)
502 hgrustext = None
502 hgrustext = None
503 self.rust = hgrustext is not None
503 self.rust = hgrustext is not None
504 self.no_rust = not self.rust
504 self.no_rust = not self.rust
505 return ret
505 return ret
506
506
507 def has_ext_modules(self):
507 def has_ext_modules(self):
508 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
508 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
509 # too late for some cases
509 # too late for some cases
510 return not self.pure and Distribution.has_ext_modules(self)
510 return not self.pure and Distribution.has_ext_modules(self)
511
511
512
512
513 # This is ugly as a one-liner. So use a variable.
513 # This is ugly as a one-liner. So use a variable.
514 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
514 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
515 buildextnegops['no-zstd'] = 'zstd'
515 buildextnegops['no-zstd'] = 'zstd'
516 buildextnegops['no-rust'] = 'rust'
516 buildextnegops['no-rust'] = 'rust'
517
517
518
518
519 class hgbuildext(build_ext):
519 class hgbuildext(build_ext):
520 user_options = build_ext.user_options + [
520 user_options = build_ext.user_options + [
521 ('zstd', None, 'compile zstd bindings [default]'),
521 ('zstd', None, 'compile zstd bindings [default]'),
522 ('no-zstd', None, 'do not compile zstd bindings'),
522 ('no-zstd', None, 'do not compile zstd bindings'),
523 (
523 (
524 'rust',
524 'rust',
525 None,
525 None,
526 'compile Rust extensions if they are in use '
526 'compile Rust extensions if they are in use '
527 '(requires Cargo) [default]',
527 '(requires Cargo) [default]',
528 ),
528 ),
529 ('no-rust', None, 'do not compile Rust extensions'),
529 ('no-rust', None, 'do not compile Rust extensions'),
530 ]
530 ]
531
531
532 boolean_options = build_ext.boolean_options + ['zstd', 'rust']
532 boolean_options = build_ext.boolean_options + ['zstd', 'rust']
533 negative_opt = buildextnegops
533 negative_opt = buildextnegops
534
534
535 def initialize_options(self):
535 def initialize_options(self):
536 self.zstd = True
536 self.zstd = True
537 self.rust = True
537 self.rust = True
538
538
539 return build_ext.initialize_options(self)
539 return build_ext.initialize_options(self)
540
540
541 def finalize_options(self):
541 def finalize_options(self):
542 # Unless overridden by the end user, build extensions in parallel.
542 # Unless overridden by the end user, build extensions in parallel.
543 # Only influences behavior on Python 3.5+.
543 # Only influences behavior on Python 3.5+.
544 if getattr(self, 'parallel', None) is None:
544 if getattr(self, 'parallel', None) is None:
545 self.parallel = True
545 self.parallel = True
546
546
547 return build_ext.finalize_options(self)
547 return build_ext.finalize_options(self)
548
548
549 def build_extensions(self):
549 def build_extensions(self):
550 ruststandalones = [
550 ruststandalones = [
551 e for e in self.extensions if isinstance(e, RustStandaloneExtension)
551 e for e in self.extensions if isinstance(e, RustStandaloneExtension)
552 ]
552 ]
553 self.extensions = [
553 self.extensions = [
554 e for e in self.extensions if e not in ruststandalones
554 e for e in self.extensions if e not in ruststandalones
555 ]
555 ]
556 # Filter out zstd if disabled via argument.
556 # Filter out zstd if disabled via argument.
557 if not self.zstd:
557 if not self.zstd:
558 self.extensions = [
558 self.extensions = [
559 e for e in self.extensions if e.name != 'mercurial.zstd'
559 e for e in self.extensions if e.name != 'mercurial.zstd'
560 ]
560 ]
561
561
562 # Build Rust standalone extensions if it'll be used
562 # Build Rust standalone extensions if it'll be used
563 # and its build is not explicitly disabled (for external build
563 # and its build is not explicitly disabled (for external build
564 # as Linux distributions would do)
564 # as Linux distributions would do)
565 if self.distribution.rust and self.rust:
565 if self.distribution.rust and self.rust:
566 if not sys.platform.startswith('linux'):
566 if not sys.platform.startswith('linux'):
567 self.warn(
567 self.warn(
568 "rust extensions have only been tested on Linux "
568 "rust extensions have only been tested on Linux "
569 "and may not behave correctly on other platforms"
569 "and may not behave correctly on other platforms"
570 )
570 )
571
571
572 for rustext in ruststandalones:
572 for rustext in ruststandalones:
573 rustext.build('' if self.inplace else self.build_lib)
573 rustext.build('' if self.inplace else self.build_lib)
574
574
575 return build_ext.build_extensions(self)
575 return build_ext.build_extensions(self)
576
576
577 def build_extension(self, ext):
577 def build_extension(self, ext):
578 if (
578 if (
579 self.distribution.rust
579 self.distribution.rust
580 and self.rust
580 and self.rust
581 and isinstance(ext, RustExtension)
581 and isinstance(ext, RustExtension)
582 ):
582 ):
583 ext.rustbuild()
583 ext.rustbuild()
584 try:
584 try:
585 build_ext.build_extension(self, ext)
585 build_ext.build_extension(self, ext)
586 except CCompilerError:
586 except CCompilerError:
587 if not getattr(ext, 'optional', False):
587 if not getattr(ext, 'optional', False):
588 raise
588 raise
589 log.warn(
589 log.warn(
590 "Failed to build optional extension '%s' (skipping)", ext.name
590 "Failed to build optional extension '%s' (skipping)", ext.name
591 )
591 )
592
592
593
593
594 class hgbuildscripts(build_scripts):
594 class hgbuildscripts(build_scripts):
595 def run(self):
595 def run(self):
596 if os.name != 'nt' or self.distribution.pure:
596 if os.name != 'nt' or self.distribution.pure:
597 return build_scripts.run(self)
597 return build_scripts.run(self)
598
598
599 exebuilt = False
599 exebuilt = False
600 try:
600 try:
601 self.run_command('build_hgexe')
601 self.run_command('build_hgexe')
602 exebuilt = True
602 exebuilt = True
603 except (DistutilsError, CCompilerError):
603 except (DistutilsError, CCompilerError):
604 log.warn('failed to build optional hg.exe')
604 log.warn('failed to build optional hg.exe')
605
605
606 if exebuilt:
606 if exebuilt:
607 # Copying hg.exe to the scripts build directory ensures it is
607 # Copying hg.exe to the scripts build directory ensures it is
608 # installed by the install_scripts command.
608 # installed by the install_scripts command.
609 hgexecommand = self.get_finalized_command('build_hgexe')
609 hgexecommand = self.get_finalized_command('build_hgexe')
610 dest = os.path.join(self.build_dir, 'hg.exe')
610 dest = os.path.join(self.build_dir, 'hg.exe')
611 self.mkpath(self.build_dir)
611 self.mkpath(self.build_dir)
612 self.copy_file(hgexecommand.hgexepath, dest)
612 self.copy_file(hgexecommand.hgexepath, dest)
613
613
614 # Remove hg.bat because it is redundant with hg.exe.
614 # Remove hg.bat because it is redundant with hg.exe.
615 self.scripts.remove('contrib/win32/hg.bat')
615 self.scripts.remove('contrib/win32/hg.bat')
616
616
617 return build_scripts.run(self)
617 return build_scripts.run(self)
618
618
619
619
620 class hgbuildpy(build_py):
620 class hgbuildpy(build_py):
621 def finalize_options(self):
621 def finalize_options(self):
622 build_py.finalize_options(self)
622 build_py.finalize_options(self)
623
623
624 if self.distribution.pure:
624 if self.distribution.pure:
625 self.distribution.ext_modules = []
625 self.distribution.ext_modules = []
626 elif self.distribution.cffi:
626 elif self.distribution.cffi:
627 from mercurial.cffi import (
627 from mercurial.cffi import (
628 bdiffbuild,
628 bdiffbuild,
629 mpatchbuild,
629 mpatchbuild,
630 )
630 )
631
631
632 exts = [
632 exts = [
633 mpatchbuild.ffi.distutils_extension(),
633 mpatchbuild.ffi.distutils_extension(),
634 bdiffbuild.ffi.distutils_extension(),
634 bdiffbuild.ffi.distutils_extension(),
635 ]
635 ]
636 # cffi modules go here
636 # cffi modules go here
637 if sys.platform == 'darwin':
637 if sys.platform == 'darwin':
638 from mercurial.cffi import osutilbuild
638 from mercurial.cffi import osutilbuild
639
639
640 exts.append(osutilbuild.ffi.distutils_extension())
640 exts.append(osutilbuild.ffi.distutils_extension())
641 self.distribution.ext_modules = exts
641 self.distribution.ext_modules = exts
642 else:
642 else:
643 h = os.path.join(get_python_inc(), 'Python.h')
643 h = os.path.join(get_python_inc(), 'Python.h')
644 if not os.path.exists(h):
644 if not os.path.exists(h):
645 raise SystemExit(
645 raise SystemExit(
646 'Python headers are required to build '
646 'Python headers are required to build '
647 'Mercurial but weren\'t found in %s' % h
647 'Mercurial but weren\'t found in %s' % h
648 )
648 )
649
649
650 def run(self):
650 def run(self):
651 basepath = os.path.join(self.build_lib, 'mercurial')
651 basepath = os.path.join(self.build_lib, 'mercurial')
652 self.mkpath(basepath)
652 self.mkpath(basepath)
653
653
654 rust = self.distribution.rust
654 rust = self.distribution.rust
655 if self.distribution.pure:
655 if self.distribution.pure:
656 modulepolicy = 'py'
656 modulepolicy = 'py'
657 elif self.build_lib == '.':
657 elif self.build_lib == '.':
658 # in-place build should run without rebuilding and Rust extensions
658 # in-place build should run without rebuilding and Rust extensions
659 modulepolicy = 'rust+c-allow' if rust else 'allow'
659 modulepolicy = 'rust+c-allow' if rust else 'allow'
660 else:
660 else:
661 modulepolicy = 'rust+c' if rust else 'c'
661 modulepolicy = 'rust+c' if rust else 'c'
662
662
663 content = b''.join(
663 content = b''.join(
664 [
664 [
665 b'# this file is autogenerated by setup.py\n',
665 b'# this file is autogenerated by setup.py\n',
666 b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
666 b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
667 ]
667 ]
668 )
668 )
669 write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content)
669 write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content)
670
670
671 build_py.run(self)
671 build_py.run(self)
672
672
673
673
674 class buildhgextindex(Command):
674 class buildhgextindex(Command):
675 description = 'generate prebuilt index of hgext (for frozen package)'
675 description = 'generate prebuilt index of hgext (for frozen package)'
676 user_options = []
676 user_options = []
677 _indexfilename = 'hgext/__index__.py'
677 _indexfilename = 'hgext/__index__.py'
678
678
679 def initialize_options(self):
679 def initialize_options(self):
680 pass
680 pass
681
681
682 def finalize_options(self):
682 def finalize_options(self):
683 pass
683 pass
684
684
685 def run(self):
685 def run(self):
686 if os.path.exists(self._indexfilename):
686 if os.path.exists(self._indexfilename):
687 with open(self._indexfilename, 'w') as f:
687 with open(self._indexfilename, 'w') as f:
688 f.write('# empty\n')
688 f.write('# empty\n')
689
689
690 # here no extension enabled, disabled() lists up everything
690 # here no extension enabled, disabled() lists up everything
691 code = (
691 code = (
692 'import pprint; from mercurial import extensions; '
692 'import pprint; from mercurial import extensions; '
693 'ext = extensions.disabled();'
693 'ext = extensions.disabled();'
694 'ext.pop("__index__", None);'
694 'ext.pop("__index__", None);'
695 'pprint.pprint(ext)'
695 'pprint.pprint(ext)'
696 )
696 )
697 returncode, out, err = runcmd(
697 returncode, out, err = runcmd(
698 [sys.executable, '-c', code], localhgenv()
698 [sys.executable, '-c', code], localhgenv()
699 )
699 )
700 if err or returncode != 0:
700 if err or returncode != 0:
701 raise DistutilsExecError(err)
701 raise DistutilsExecError(err)
702
702
703 with open(self._indexfilename, 'wb') as f:
703 with open(self._indexfilename, 'wb') as f:
704 f.write(b'# this file is autogenerated by setup.py\n')
704 f.write(b'# this file is autogenerated by setup.py\n')
705 f.write(b'docs = ')
705 f.write(b'docs = ')
706 f.write(out)
706 f.write(out)
707
707
708
708
709 class buildhgexe(build_ext):
709 class buildhgexe(build_ext):
710 description = 'compile hg.exe from mercurial/exewrapper.c'
710 description = 'compile hg.exe from mercurial/exewrapper.c'
711
711
712 LONG_PATHS_MANIFEST = """\
712 LONG_PATHS_MANIFEST = """\
713 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
713 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
714 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
714 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
715 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
715 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
716 <security>
716 <security>
717 <requestedPrivileges>
717 <requestedPrivileges>
718 <requestedExecutionLevel
718 <requestedExecutionLevel
719 level="asInvoker"
719 level="asInvoker"
720 uiAccess="false"
720 uiAccess="false"
721 />
721 />
722 </requestedPrivileges>
722 </requestedPrivileges>
723 </security>
723 </security>
724 </trustInfo>
724 </trustInfo>
725 <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
725 <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
726 <application>
726 <application>
727 <!-- Windows Vista -->
727 <!-- Windows Vista -->
728 <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
728 <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
729 <!-- Windows 7 -->
729 <!-- Windows 7 -->
730 <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
730 <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
731 <!-- Windows 8 -->
731 <!-- Windows 8 -->
732 <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
732 <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
733 <!-- Windows 8.1 -->
733 <!-- Windows 8.1 -->
734 <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
734 <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
735 <!-- Windows 10 and Windows 11 -->
735 <!-- Windows 10 and Windows 11 -->
736 <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
736 <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
737 </application>
737 </application>
738 </compatibility>
738 </compatibility>
739 <application xmlns="urn:schemas-microsoft-com:asm.v3">
739 <application xmlns="urn:schemas-microsoft-com:asm.v3">
740 <windowsSettings
740 <windowsSettings
741 xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
741 xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
742 <ws2:longPathAware>true</ws2:longPathAware>
742 <ws2:longPathAware>true</ws2:longPathAware>
743 </windowsSettings>
743 </windowsSettings>
744 </application>
744 </application>
745 <dependency>
745 <dependency>
746 <dependentAssembly>
746 <dependentAssembly>
747 <assemblyIdentity type="win32"
747 <assemblyIdentity type="win32"
748 name="Microsoft.Windows.Common-Controls"
748 name="Microsoft.Windows.Common-Controls"
749 version="6.0.0.0"
749 version="6.0.0.0"
750 processorArchitecture="*"
750 processorArchitecture="*"
751 publicKeyToken="6595b64144ccf1df"
751 publicKeyToken="6595b64144ccf1df"
752 language="*" />
752 language="*" />
753 </dependentAssembly>
753 </dependentAssembly>
754 </dependency>
754 </dependency>
755 </assembly>
755 </assembly>
756 """
756 """
757
757
758 def initialize_options(self):
758 def initialize_options(self):
759 build_ext.initialize_options(self)
759 build_ext.initialize_options(self)
760
760
761 def build_extensions(self):
761 def build_extensions(self):
762 if os.name != 'nt':
762 if os.name != 'nt':
763 return
763 return
764 if isinstance(self.compiler, HackedMingw32CCompiler):
764 if isinstance(self.compiler, HackedMingw32CCompiler):
765 self.compiler.compiler_so = self.compiler.compiler # no -mdll
765 self.compiler.compiler_so = self.compiler.compiler # no -mdll
766 self.compiler.dll_libraries = [] # no -lmsrvc90
766 self.compiler.dll_libraries = [] # no -lmsrvc90
767
767
768 pythonlib = None
768 pythonlib = None
769
769
770 dirname = os.path.dirname(self.get_ext_fullpath('dummy'))
770 dirname = os.path.dirname(self.get_ext_fullpath('dummy'))
771 self.hgtarget = os.path.join(dirname, 'hg')
771 self.hgtarget = os.path.join(dirname, 'hg')
772
772
773 if getattr(sys, 'dllhandle', None):
773 if getattr(sys, 'dllhandle', None):
774 # Different Python installs can have different Python library
774 # Different Python installs can have different Python library
775 # names. e.g. the official CPython distribution uses pythonXY.dll
775 # names. e.g. the official CPython distribution uses pythonXY.dll
776 # and MinGW uses libpythonX.Y.dll.
776 # and MinGW uses libpythonX.Y.dll.
777 _kernel32 = ctypes.windll.kernel32
777 _kernel32 = ctypes.windll.kernel32
778 _kernel32.GetModuleFileNameA.argtypes = [
778 _kernel32.GetModuleFileNameA.argtypes = [
779 ctypes.c_void_p,
779 ctypes.c_void_p,
780 ctypes.c_void_p,
780 ctypes.c_void_p,
781 ctypes.c_ulong,
781 ctypes.c_ulong,
782 ]
782 ]
783 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
783 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
784 size = 1000
784 size = 1000
785 buf = ctypes.create_string_buffer(size + 1)
785 buf = ctypes.create_string_buffer(size + 1)
786 filelen = _kernel32.GetModuleFileNameA(
786 filelen = _kernel32.GetModuleFileNameA(
787 sys.dllhandle, ctypes.byref(buf), size
787 sys.dllhandle, ctypes.byref(buf), size
788 )
788 )
789
789
790 if filelen > 0 and filelen != size:
790 if filelen > 0 and filelen != size:
791 dllbasename = os.path.basename(buf.value)
791 dllbasename = os.path.basename(buf.value)
792 if not dllbasename.lower().endswith(b'.dll'):
792 if not dllbasename.lower().endswith(b'.dll'):
793 raise SystemExit(
793 raise SystemExit(
794 'Python DLL does not end with .dll: %s' % dllbasename
794 'Python DLL does not end with .dll: %s' % dllbasename
795 )
795 )
796 pythonlib = dllbasename[:-4]
796 pythonlib = dllbasename[:-4]
797
797
798 # Copy the pythonXY.dll next to the binary so that it runs
798 # Copy the pythonXY.dll next to the binary so that it runs
799 # without tampering with PATH.
799 # without tampering with PATH.
800 dest = os.path.join(
800 dest = os.path.join(
801 os.path.dirname(self.hgtarget),
801 os.path.dirname(self.hgtarget),
802 os.fsdecode(dllbasename),
802 os.fsdecode(dllbasename),
803 )
803 )
804
804
805 if not os.path.exists(dest):
805 if not os.path.exists(dest):
806 shutil.copy(buf.value, dest)
806 shutil.copy(buf.value, dest)
807
807
808 # Also overwrite python3.dll so that hgext.git is usable.
808 # Also overwrite python3.dll so that hgext.git is usable.
809 # TODO: also handle the MSYS flavor
809 # TODO: also handle the MSYS flavor
810 python_x = os.path.join(
810 python_x = os.path.join(
811 os.path.dirname(os.fsdecode(buf.value)),
811 os.path.dirname(os.fsdecode(buf.value)),
812 "python3.dll",
812 "python3.dll",
813 )
813 )
814
814
815 if os.path.exists(python_x):
815 if os.path.exists(python_x):
816 dest = os.path.join(
816 dest = os.path.join(
817 os.path.dirname(self.hgtarget),
817 os.path.dirname(self.hgtarget),
818 os.path.basename(python_x),
818 os.path.basename(python_x),
819 )
819 )
820
820
821 shutil.copy(python_x, dest)
821 shutil.copy(python_x, dest)
822
822
823 if not pythonlib:
823 if not pythonlib:
824 log.warn(
824 log.warn(
825 'could not determine Python DLL filename; assuming pythonXY'
825 'could not determine Python DLL filename; assuming pythonXY'
826 )
826 )
827
827
828 hv = sys.hexversion
828 hv = sys.hexversion
829 pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF)
829 pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF)
830
830
831 log.info('using %s as Python library name' % pythonlib)
831 log.info('using %s as Python library name' % pythonlib)
832 with open('mercurial/hgpythonlib.h', 'wb') as f:
832 with open('mercurial/hgpythonlib.h', 'wb') as f:
833 f.write(b'/* this file is autogenerated by setup.py */\n')
833 f.write(b'/* this file is autogenerated by setup.py */\n')
834 f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
834 f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
835
835
836 objects = self.compiler.compile(
836 objects = self.compiler.compile(
837 ['mercurial/exewrapper.c'],
837 ['mercurial/exewrapper.c'],
838 output_dir=self.build_temp,
838 output_dir=self.build_temp,
839 macros=[('_UNICODE', None), ('UNICODE', None)],
839 macros=[('_UNICODE', None), ('UNICODE', None)],
840 )
840 )
841 self.compiler.link_executable(
841 self.compiler.link_executable(
842 objects, self.hgtarget, libraries=[], output_dir=self.build_temp
842 objects, self.hgtarget, libraries=[], output_dir=self.build_temp
843 )
843 )
844
844
845 self.addlongpathsmanifest()
845 self.addlongpathsmanifest()
846
846
847 def addlongpathsmanifest(self):
847 def addlongpathsmanifest(self):
848 """Add manifest pieces so that hg.exe understands long paths
848 """Add manifest pieces so that hg.exe understands long paths
849
849
850 Why resource #1 should be used for .exe manifests? I don't know and
850 Why resource #1 should be used for .exe manifests? I don't know and
851 wasn't able to find an explanation for mortals. But it seems to work.
851 wasn't able to find an explanation for mortals. But it seems to work.
852 """
852 """
853 exefname = self.compiler.executable_filename(self.hgtarget)
853 exefname = self.compiler.executable_filename(self.hgtarget)
854 fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
854 fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
855 os.close(fdauto)
855 os.close(fdauto)
856 with open(manfname, 'w', encoding="UTF-8") as f:
856 with open(manfname, 'w', encoding="UTF-8") as f:
857 f.write(self.LONG_PATHS_MANIFEST)
857 f.write(self.LONG_PATHS_MANIFEST)
858 log.info("long paths manifest is written to '%s'" % manfname)
858 log.info("long paths manifest is written to '%s'" % manfname)
859 outputresource = '-outputresource:%s;#1' % exefname
859 outputresource = '-outputresource:%s;#1' % exefname
860 log.info("running mt.exe to update hg.exe's manifest in-place")
860 log.info("running mt.exe to update hg.exe's manifest in-place")
861
861
862 self.spawn(
862 self.spawn(
863 [
863 [
864 self.compiler.mt,
864 self.compiler.mt,
865 '-nologo',
865 '-nologo',
866 '-manifest',
866 '-manifest',
867 manfname,
867 manfname,
868 outputresource,
868 outputresource,
869 ]
869 ]
870 )
870 )
871 log.info("done updating hg.exe's manifest")
871 log.info("done updating hg.exe's manifest")
872 os.remove(manfname)
872 os.remove(manfname)
873
873
874 @property
874 @property
875 def hgexepath(self):
875 def hgexepath(self):
876 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
876 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
877 return os.path.join(self.build_temp, dir, 'hg.exe')
877 return os.path.join(self.build_temp, dir, 'hg.exe')
878
878
879
879
880 class hgbuilddoc(Command):
880 class hgbuilddoc(Command):
881 description = 'build documentation'
881 description = 'build documentation'
882 user_options = [
882 user_options = [
883 ('man', None, 'generate man pages'),
883 ('man', None, 'generate man pages'),
884 ('html', None, 'generate html pages'),
884 ('html', None, 'generate html pages'),
885 ]
885 ]
886
886
887 def initialize_options(self):
887 def initialize_options(self):
888 self.man = None
888 self.man = None
889 self.html = None
889 self.html = None
890
890
891 def finalize_options(self):
891 def finalize_options(self):
892 # If --man or --html are set, only generate what we're told to.
892 # If --man or --html are set, only generate what we're told to.
893 # Otherwise generate everything.
893 # Otherwise generate everything.
894 have_subset = self.man is not None or self.html is not None
894 have_subset = self.man is not None or self.html is not None
895
895
896 if have_subset:
896 if have_subset:
897 self.man = True if self.man else False
897 self.man = True if self.man else False
898 self.html = True if self.html else False
898 self.html = True if self.html else False
899 else:
899 else:
900 self.man = True
900 self.man = True
901 self.html = True
901 self.html = True
902
902
903 def run(self):
903 def run(self):
904 def normalizecrlf(p):
904 def normalizecrlf(p):
905 with open(p, 'rb') as fh:
905 with open(p, 'rb') as fh:
906 orig = fh.read()
906 orig = fh.read()
907
907
908 if b'\r\n' not in orig:
908 if b'\r\n' not in orig:
909 return
909 return
910
910
911 log.info('normalizing %s to LF line endings' % p)
911 log.info('normalizing %s to LF line endings' % p)
912 with open(p, 'wb') as fh:
912 with open(p, 'wb') as fh:
913 fh.write(orig.replace(b'\r\n', b'\n'))
913 fh.write(orig.replace(b'\r\n', b'\n'))
914
914
915 def gentxt(root):
915 def gentxt(root):
916 txt = 'doc/%s.txt' % root
916 txt = 'doc/%s.txt' % root
917 log.info('generating %s' % txt)
917 log.info('generating %s' % txt)
918 res, out, err = runcmd(
918 res, out, err = runcmd(
919 [sys.executable, 'gendoc.py', root], os.environ, cwd='doc'
919 [sys.executable, 'gendoc.py', root], os.environ, cwd='doc'
920 )
920 )
921 if res:
921 if res:
922 raise SystemExit(
922 raise SystemExit(
923 'error running gendoc.py: %s'
923 'error running gendoc.py: %s'
924 % '\n'.join([sysstr(out), sysstr(err)])
924 % '\n'.join([sysstr(out), sysstr(err)])
925 )
925 )
926
926
927 with open(txt, 'wb') as fh:
927 with open(txt, 'wb') as fh:
928 fh.write(out)
928 fh.write(out)
929
929
930 def gengendoc(root):
930 def gengendoc(root):
931 gendoc = 'doc/%s.gendoc.txt' % root
931 gendoc = 'doc/%s.gendoc.txt' % root
932
932
933 log.info('generating %s' % gendoc)
933 log.info('generating %s' % gendoc)
934 res, out, err = runcmd(
934 res, out, err = runcmd(
935 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
935 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
936 os.environ,
936 os.environ,
937 cwd='doc',
937 cwd='doc',
938 )
938 )
939 if res:
939 if res:
940 raise SystemExit(
940 raise SystemExit(
941 'error running gendoc: %s'
941 'error running gendoc: %s'
942 % '\n'.join([sysstr(out), sysstr(err)])
942 % '\n'.join([sysstr(out), sysstr(err)])
943 )
943 )
944
944
945 with open(gendoc, 'wb') as fh:
945 with open(gendoc, 'wb') as fh:
946 fh.write(out)
946 fh.write(out)
947
947
948 def genman(root):
948 def genman(root):
949 log.info('generating doc/%s' % root)
949 log.info('generating doc/%s' % root)
950 res, out, err = runcmd(
950 res, out, err = runcmd(
951 [
951 [
952 sys.executable,
952 sys.executable,
953 'runrst',
953 'runrst',
954 'hgmanpage',
954 'hgmanpage',
955 '--halt',
955 '--halt',
956 'warning',
956 'warning',
957 '--strip-elements-with-class',
957 '--strip-elements-with-class',
958 'htmlonly',
958 'htmlonly',
959 '%s.txt' % root,
959 '%s.txt' % root,
960 root,
960 root,
961 ],
961 ],
962 os.environ,
962 os.environ,
963 cwd='doc',
963 cwd='doc',
964 )
964 )
965 if res:
965 if res:
966 raise SystemExit(
966 raise SystemExit(
967 'error running runrst: %s'
967 'error running runrst: %s'
968 % '\n'.join([sysstr(out), sysstr(err)])
968 % '\n'.join([sysstr(out), sysstr(err)])
969 )
969 )
970
970
971 normalizecrlf('doc/%s' % root)
971 normalizecrlf('doc/%s' % root)
972
972
973 def genhtml(root):
973 def genhtml(root):
974 log.info('generating doc/%s.html' % root)
974 log.info('generating doc/%s.html' % root)
975 res, out, err = runcmd(
975 res, out, err = runcmd(
976 [
976 [
977 sys.executable,
977 sys.executable,
978 'runrst',
978 'runrst',
979 'html',
979 'html',
980 '--halt',
980 '--halt',
981 'warning',
981 'warning',
982 '--link-stylesheet',
982 '--link-stylesheet',
983 '--stylesheet-path',
983 '--stylesheet-path',
984 'style.css',
984 'style.css',
985 '%s.txt' % root,
985 '%s.txt' % root,
986 '%s.html' % root,
986 '%s.html' % root,
987 ],
987 ],
988 os.environ,
988 os.environ,
989 cwd='doc',
989 cwd='doc',
990 )
990 )
991 if res:
991 if res:
992 raise SystemExit(
992 raise SystemExit(
993 'error running runrst: %s'
993 'error running runrst: %s'
994 % '\n'.join([sysstr(out), sysstr(err)])
994 % '\n'.join([sysstr(out), sysstr(err)])
995 )
995 )
996
996
997 normalizecrlf('doc/%s.html' % root)
997 normalizecrlf('doc/%s.html' % root)
998
998
999 # This logic is duplicated in doc/Makefile.
999 # This logic is duplicated in doc/Makefile.
1000 sources = {
1000 sources = {
1001 f
1001 f
1002 for f in os.listdir('mercurial/helptext')
1002 for f in os.listdir('mercurial/helptext')
1003 if re.search(r'[0-9]\.txt$', f)
1003 if re.search(r'[0-9]\.txt$', f)
1004 }
1004 }
1005
1005
1006 # common.txt is a one-off.
1006 # common.txt is a one-off.
1007 gentxt('common')
1007 gentxt('common')
1008
1008
1009 for source in sorted(sources):
1009 for source in sorted(sources):
1010 assert source[-4:] == '.txt'
1010 assert source[-4:] == '.txt'
1011 root = source[:-4]
1011 root = source[:-4]
1012
1012
1013 gentxt(root)
1013 gentxt(root)
1014 gengendoc(root)
1014 gengendoc(root)
1015
1015
1016 if self.man:
1016 if self.man:
1017 genman(root)
1017 genman(root)
1018 if self.html:
1018 if self.html:
1019 genhtml(root)
1019 genhtml(root)
1020
1020
1021
1021
1022 class hginstall(install):
1022 class hginstall(install):
1023
1023
1024 user_options = install.user_options + [
1024 user_options = install.user_options + [
1025 (
1025 (
1026 'old-and-unmanageable',
1026 'old-and-unmanageable',
1027 None,
1027 None,
1028 'noop, present for eggless setuptools compat',
1028 'noop, present for eggless setuptools compat',
1029 ),
1029 ),
1030 (
1030 (
1031 'single-version-externally-managed',
1031 'single-version-externally-managed',
1032 None,
1032 None,
1033 'noop, present for eggless setuptools compat',
1033 'noop, present for eggless setuptools compat',
1034 ),
1034 ),
1035 ]
1035 ]
1036
1036
1037 sub_commands = install.sub_commands + [
1037 sub_commands = install.sub_commands + [
1038 ('install_completion', lambda self: True)
1038 ('install_completion', lambda self: True)
1039 ]
1039 ]
1040
1040
1041 # Also helps setuptools not be sad while we refuse to create eggs.
1041 # Also helps setuptools not be sad while we refuse to create eggs.
1042 single_version_externally_managed = True
1042 single_version_externally_managed = True
1043
1043
1044 def get_sub_commands(self):
1044 def get_sub_commands(self):
1045 # Screen out egg related commands to prevent egg generation. But allow
1045 # Screen out egg related commands to prevent egg generation. But allow
1046 # mercurial.egg-info generation, since that is part of modern
1046 # mercurial.egg-info generation, since that is part of modern
1047 # packaging.
1047 # packaging.
1048 excl = {'bdist_egg'}
1048 excl = {'bdist_egg'}
1049 return filter(lambda x: x not in excl, install.get_sub_commands(self))
1049 return filter(lambda x: x not in excl, install.get_sub_commands(self))
1050
1050
1051
1051
1052 class hginstalllib(install_lib):
1052 class hginstalllib(install_lib):
1053 """
1053 """
1054 This is a specialization of install_lib that replaces the copy_file used
1054 This is a specialization of install_lib that replaces the copy_file used
1055 there so that it supports setting the mode of files after copying them,
1055 there so that it supports setting the mode of files after copying them,
1056 instead of just preserving the mode that the files originally had. If your
1056 instead of just preserving the mode that the files originally had. If your
1057 system has a umask of something like 027, preserving the permissions when
1057 system has a umask of something like 027, preserving the permissions when
1058 copying will lead to a broken install.
1058 copying will lead to a broken install.
1059
1059
1060 Note that just passing keep_permissions=False to copy_file would be
1060 Note that just passing keep_permissions=False to copy_file would be
1061 insufficient, as it might still be applying a umask.
1061 insufficient, as it might still be applying a umask.
1062 """
1062 """
1063
1063
1064 def run(self):
1064 def run(self):
1065 realcopyfile = file_util.copy_file
1065 realcopyfile = file_util.copy_file
1066
1066
1067 def copyfileandsetmode(*args, **kwargs):
1067 def copyfileandsetmode(*args, **kwargs):
1068 src, dst = args[0], args[1]
1068 src, dst = args[0], args[1]
1069 dst, copied = realcopyfile(*args, **kwargs)
1069 dst, copied = realcopyfile(*args, **kwargs)
1070 if copied:
1070 if copied:
1071 st = os.stat(src)
1071 st = os.stat(src)
1072 # Persist executable bit (apply it to group and other if user
1072 # Persist executable bit (apply it to group and other if user
1073 # has it)
1073 # has it)
1074 if st[stat.ST_MODE] & stat.S_IXUSR:
1074 if st[stat.ST_MODE] & stat.S_IXUSR:
1075 setmode = int('0755', 8)
1075 setmode = int('0755', 8)
1076 else:
1076 else:
1077 setmode = int('0644', 8)
1077 setmode = int('0644', 8)
1078 m = stat.S_IMODE(st[stat.ST_MODE])
1078 m = stat.S_IMODE(st[stat.ST_MODE])
1079 m = (m & ~int('0777', 8)) | setmode
1079 m = (m & ~int('0777', 8)) | setmode
1080 os.chmod(dst, m)
1080 os.chmod(dst, m)
1081
1081
1082 file_util.copy_file = copyfileandsetmode
1082 file_util.copy_file = copyfileandsetmode
1083 try:
1083 try:
1084 install_lib.run(self)
1084 install_lib.run(self)
1085 finally:
1085 finally:
1086 file_util.copy_file = realcopyfile
1086 file_util.copy_file = realcopyfile
1087
1087
1088
1088
1089 class hginstallscripts(install_scripts):
1089 class hginstallscripts(install_scripts):
1090 """
1090 """
1091 This is a specialization of install_scripts that replaces the @LIBDIR@ with
1091 This is a specialization of install_scripts that replaces the @LIBDIR@ with
1092 the configured directory for modules. If possible, the path is made relative
1092 the configured directory for modules. If possible, the path is made relative
1093 to the directory for scripts.
1093 to the directory for scripts.
1094 """
1094 """
1095
1095
1096 def initialize_options(self):
1096 def initialize_options(self):
1097 install_scripts.initialize_options(self)
1097 install_scripts.initialize_options(self)
1098
1098
1099 self.install_lib = None
1099 self.install_lib = None
1100
1100
1101 def finalize_options(self):
1101 def finalize_options(self):
1102 install_scripts.finalize_options(self)
1102 install_scripts.finalize_options(self)
1103 self.set_undefined_options('install', ('install_lib', 'install_lib'))
1103 self.set_undefined_options('install', ('install_lib', 'install_lib'))
1104
1104
1105 def run(self):
1105 def run(self):
1106 install_scripts.run(self)
1106 install_scripts.run(self)
1107
1107
1108 # It only makes sense to replace @LIBDIR@ with the install path if
1108 # It only makes sense to replace @LIBDIR@ with the install path if
1109 # the install path is known. For wheels, the logic below calculates
1109 # the install path is known. For wheels, the logic below calculates
1110 # the libdir to be "../..". This is because the internal layout of a
1110 # the libdir to be "../..". This is because the internal layout of a
1111 # wheel archive looks like:
1111 # wheel archive looks like:
1112 #
1112 #
1113 # mercurial-3.6.1.data/scripts/hg
1113 # mercurial-3.6.1.data/scripts/hg
1114 # mercurial/__init__.py
1114 # mercurial/__init__.py
1115 #
1115 #
1116 # When installing wheels, the subdirectories of the "<pkg>.data"
1116 # When installing wheels, the subdirectories of the "<pkg>.data"
1117 # directory are translated to system local paths and files therein
1117 # directory are translated to system local paths and files therein
1118 # are copied in place. The mercurial/* files are installed into the
1118 # are copied in place. The mercurial/* files are installed into the
1119 # site-packages directory. However, the site-packages directory
1119 # site-packages directory. However, the site-packages directory
1120 # isn't known until wheel install time. This means we have no clue
1120 # isn't known until wheel install time. This means we have no clue
1121 # at wheel generation time what the installed site-packages directory
1121 # at wheel generation time what the installed site-packages directory
1122 # will be. And, wheels don't appear to provide the ability to register
1122 # will be. And, wheels don't appear to provide the ability to register
1123 # custom code to run during wheel installation. This all means that
1123 # custom code to run during wheel installation. This all means that
1124 # we can't reliably set the libdir in wheels: the default behavior
1124 # we can't reliably set the libdir in wheels: the default behavior
1125 # of looking in sys.path must do.
1125 # of looking in sys.path must do.
1126
1126
1127 if (
1127 if (
1128 os.path.splitdrive(self.install_dir)[0]
1128 os.path.splitdrive(self.install_dir)[0]
1129 != os.path.splitdrive(self.install_lib)[0]
1129 != os.path.splitdrive(self.install_lib)[0]
1130 ):
1130 ):
1131 # can't make relative paths from one drive to another, so use an
1131 # can't make relative paths from one drive to another, so use an
1132 # absolute path instead
1132 # absolute path instead
1133 libdir = self.install_lib
1133 libdir = self.install_lib
1134 else:
1134 else:
1135 libdir = os.path.relpath(self.install_lib, self.install_dir)
1135 libdir = os.path.relpath(self.install_lib, self.install_dir)
1136
1136
1137 for outfile in self.outfiles:
1137 for outfile in self.outfiles:
1138 with open(outfile, 'rb') as fp:
1138 with open(outfile, 'rb') as fp:
1139 data = fp.read()
1139 data = fp.read()
1140
1140
1141 # skip binary files
1141 # skip binary files
1142 if b'\0' in data:
1142 if b'\0' in data:
1143 continue
1143 continue
1144
1144
1145 # During local installs, the shebang will be rewritten to the final
1145 # During local installs, the shebang will be rewritten to the final
1146 # install path. During wheel packaging, the shebang has a special
1146 # install path. During wheel packaging, the shebang has a special
1147 # value.
1147 # value.
1148 if data.startswith(b'#!python'):
1148 if data.startswith(b'#!python'):
1149 log.info(
1149 log.info(
1150 'not rewriting @LIBDIR@ in %s because install path '
1150 'not rewriting @LIBDIR@ in %s because install path '
1151 'not known' % outfile
1151 'not known' % outfile
1152 )
1152 )
1153 continue
1153 continue
1154
1154
1155 data = data.replace(b'@LIBDIR@', libdir.encode('unicode_escape'))
1155 data = data.replace(b'@LIBDIR@', libdir.encode('unicode_escape'))
1156 with open(outfile, 'wb') as fp:
1156 with open(outfile, 'wb') as fp:
1157 fp.write(data)
1157 fp.write(data)
1158
1158
1159
1159
1160 class hginstallcompletion(Command):
1160 class hginstallcompletion(Command):
1161 description = 'Install shell completion'
1161 description = 'Install shell completion'
1162
1162
1163 def initialize_options(self):
1163 def initialize_options(self):
1164 self.install_dir = None
1164 self.install_dir = None
1165 self.outputs = []
1165 self.outputs = []
1166
1166
1167 def finalize_options(self):
1167 def finalize_options(self):
1168 self.set_undefined_options(
1168 self.set_undefined_options(
1169 'install_data', ('install_dir', 'install_dir')
1169 'install_data', ('install_dir', 'install_dir')
1170 )
1170 )
1171
1171
1172 def get_outputs(self):
1172 def get_outputs(self):
1173 return self.outputs
1173 return self.outputs
1174
1174
1175 def run(self):
1175 def run(self):
1176 for src, dir_path, dest in (
1176 for src, dir_path, dest in (
1177 (
1177 (
1178 'bash_completion',
1178 'bash_completion',
1179 ('share', 'bash-completion', 'completions'),
1179 ('share', 'bash-completion', 'completions'),
1180 'hg',
1180 'hg',
1181 ),
1181 ),
1182 ('zsh_completion', ('share', 'zsh', 'site-functions'), '_hg'),
1182 ('zsh_completion', ('share', 'zsh', 'site-functions'), '_hg'),
1183 ):
1183 ):
1184 dir = os.path.join(self.install_dir, *dir_path)
1184 dir = os.path.join(self.install_dir, *dir_path)
1185 self.mkpath(dir)
1185 self.mkpath(dir)
1186
1186
1187 dest = os.path.join(dir, dest)
1187 dest = os.path.join(dir, dest)
1188 self.outputs.append(dest)
1188 self.outputs.append(dest)
1189 self.copy_file(os.path.join('contrib', src), dest)
1189 self.copy_file(os.path.join('contrib', src), dest)
1190
1190
1191
1191
1192 # virtualenv installs custom distutils/__init__.py and
1192 # virtualenv installs custom distutils/__init__.py and
1193 # distutils/distutils.cfg files which essentially proxy back to the
1193 # distutils/distutils.cfg files which essentially proxy back to the
1194 # "real" distutils in the main Python install. The presence of this
1194 # "real" distutils in the main Python install. The presence of this
1195 # directory causes py2exe to pick up the "hacked" distutils package
1195 # directory causes py2exe to pick up the "hacked" distutils package
1196 # from the virtualenv and "import distutils" will fail from the py2exe
1196 # from the virtualenv and "import distutils" will fail from the py2exe
1197 # build because the "real" distutils files can't be located.
1197 # build because the "real" distutils files can't be located.
1198 #
1198 #
1199 # We work around this by monkeypatching the py2exe code finding Python
1199 # We work around this by monkeypatching the py2exe code finding Python
1200 # modules to replace the found virtualenv distutils modules with the
1200 # modules to replace the found virtualenv distutils modules with the
1201 # original versions via filesystem scanning. This is a bit hacky. But
1201 # original versions via filesystem scanning. This is a bit hacky. But
1202 # it allows us to use virtualenvs for py2exe packaging, which is more
1202 # it allows us to use virtualenvs for py2exe packaging, which is more
1203 # deterministic and reproducible.
1203 # deterministic and reproducible.
1204 #
1204 #
1205 # It's worth noting that the common StackOverflow suggestions for this
1205 # It's worth noting that the common StackOverflow suggestions for this
1206 # problem involve copying the original distutils files into the
1206 # problem involve copying the original distutils files into the
1207 # virtualenv or into the staging directory after setup() is invoked.
1207 # virtualenv or into the staging directory after setup() is invoked.
1208 # The former is very brittle and can easily break setup(). Our hacking
1208 # The former is very brittle and can easily break setup(). Our hacking
1209 # of the found modules routine has a similar result as copying the files
1209 # of the found modules routine has a similar result as copying the files
1210 # manually. But it makes fewer assumptions about how py2exe works and
1210 # manually. But it makes fewer assumptions about how py2exe works and
1211 # is less brittle.
1211 # is less brittle.
1212
1212
1213 # This only catches virtualenvs made with virtualenv (as opposed to
1213 # This only catches virtualenvs made with virtualenv (as opposed to
1214 # venv, which is likely what Python 3 uses).
1214 # venv, which is likely what Python 3 uses).
1215 py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None
1215 py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None
1216
1216
1217 if py2exehacked:
1217 if py2exehacked:
1218 from distutils.command.py2exe import py2exe as buildpy2exe
1218 from distutils.command.py2exe import py2exe as buildpy2exe
1219 from py2exe.mf import Module as py2exemodule
1219 from py2exe.mf import Module as py2exemodule
1220
1220
1221 class hgbuildpy2exe(buildpy2exe):
1221 class hgbuildpy2exe(buildpy2exe):
1222 def find_needed_modules(self, mf, files, modules):
1222 def find_needed_modules(self, mf, files, modules):
1223 res = buildpy2exe.find_needed_modules(self, mf, files, modules)
1223 res = buildpy2exe.find_needed_modules(self, mf, files, modules)
1224
1224
1225 # Replace virtualenv's distutils modules with the real ones.
1225 # Replace virtualenv's distutils modules with the real ones.
1226 modules = {}
1226 modules = {}
1227 for k, v in res.modules.items():
1227 for k, v in res.modules.items():
1228 if k != 'distutils' and not k.startswith('distutils.'):
1228 if k != 'distutils' and not k.startswith('distutils.'):
1229 modules[k] = v
1229 modules[k] = v
1230
1230
1231 res.modules = modules
1231 res.modules = modules
1232
1232
1233 import opcode
1233 import opcode
1234
1234
1235 distutilsreal = os.path.join(
1235 distutilsreal = os.path.join(
1236 os.path.dirname(opcode.__file__), 'distutils'
1236 os.path.dirname(opcode.__file__), 'distutils'
1237 )
1237 )
1238
1238
1239 for root, dirs, files in os.walk(distutilsreal):
1239 for root, dirs, files in os.walk(distutilsreal):
1240 for f in sorted(files):
1240 for f in sorted(files):
1241 if not f.endswith('.py'):
1241 if not f.endswith('.py'):
1242 continue
1242 continue
1243
1243
1244 full = os.path.join(root, f)
1244 full = os.path.join(root, f)
1245
1245
1246 parents = ['distutils']
1246 parents = ['distutils']
1247
1247
1248 if root != distutilsreal:
1248 if root != distutilsreal:
1249 rel = os.path.relpath(root, distutilsreal)
1249 rel = os.path.relpath(root, distutilsreal)
1250 parents.extend(p for p in rel.split(os.sep))
1250 parents.extend(p for p in rel.split(os.sep))
1251
1251
1252 modname = '%s.%s' % ('.'.join(parents), f[:-3])
1252 modname = '%s.%s' % ('.'.join(parents), f[:-3])
1253
1253
1254 if modname.startswith('distutils.tests.'):
1254 if modname.startswith('distutils.tests.'):
1255 continue
1255 continue
1256
1256
1257 if modname.endswith('.__init__'):
1257 if modname.endswith('.__init__'):
1258 modname = modname[: -len('.__init__')]
1258 modname = modname[: -len('.__init__')]
1259 path = os.path.dirname(full)
1259 path = os.path.dirname(full)
1260 else:
1260 else:
1261 path = None
1261 path = None
1262
1262
1263 res.modules[modname] = py2exemodule(
1263 res.modules[modname] = py2exemodule(
1264 modname, full, path=path
1264 modname, full, path=path
1265 )
1265 )
1266
1266
1267 if 'distutils' not in res.modules:
1267 if 'distutils' not in res.modules:
1268 raise SystemExit('could not find distutils modules')
1268 raise SystemExit('could not find distutils modules')
1269
1269
1270 return res
1270 return res
1271
1271
1272
1272
1273 cmdclass = {
1273 cmdclass = {
1274 'build': hgbuild,
1274 'build': hgbuild,
1275 'build_doc': hgbuilddoc,
1275 'build_doc': hgbuilddoc,
1276 'build_mo': hgbuildmo,
1276 'build_mo': hgbuildmo,
1277 'build_ext': hgbuildext,
1277 'build_ext': hgbuildext,
1278 'build_py': hgbuildpy,
1278 'build_py': hgbuildpy,
1279 'build_scripts': hgbuildscripts,
1279 'build_scripts': hgbuildscripts,
1280 'build_hgextindex': buildhgextindex,
1280 'build_hgextindex': buildhgextindex,
1281 'install': hginstall,
1281 'install': hginstall,
1282 'install_completion': hginstallcompletion,
1282 'install_completion': hginstallcompletion,
1283 'install_lib': hginstalllib,
1283 'install_lib': hginstalllib,
1284 'install_scripts': hginstallscripts,
1284 'install_scripts': hginstallscripts,
1285 'build_hgexe': buildhgexe,
1285 'build_hgexe': buildhgexe,
1286 }
1286 }
1287
1287
1288 if py2exehacked:
1288 if py2exehacked:
1289 cmdclass['py2exe'] = hgbuildpy2exe
1289 cmdclass['py2exe'] = hgbuildpy2exe
1290
1290
1291 packages = [
1291 packages = [
1292 'mercurial',
1292 'mercurial',
1293 'mercurial.cext',
1293 'mercurial.cext',
1294 'mercurial.cffi',
1294 'mercurial.cffi',
1295 'mercurial.defaultrc',
1295 'mercurial.defaultrc',
1296 'mercurial.dirstateutils',
1296 'mercurial.dirstateutils',
1297 'mercurial.helptext',
1297 'mercurial.helptext',
1298 'mercurial.helptext.internals',
1298 'mercurial.helptext.internals',
1299 'mercurial.hgweb',
1299 'mercurial.hgweb',
1300 'mercurial.interfaces',
1300 'mercurial.interfaces',
1301 'mercurial.pure',
1301 'mercurial.pure',
1302 'mercurial.templates',
1302 'mercurial.templates',
1303 'mercurial.thirdparty',
1303 'mercurial.thirdparty',
1304 'mercurial.thirdparty.attr',
1304 'mercurial.thirdparty.attr',
1305 'mercurial.thirdparty.jaraco',
1305 'mercurial.thirdparty.zope',
1306 'mercurial.thirdparty.zope',
1306 'mercurial.thirdparty.zope.interface',
1307 'mercurial.thirdparty.zope.interface',
1307 'mercurial.upgrade_utils',
1308 'mercurial.upgrade_utils',
1308 'mercurial.utils',
1309 'mercurial.utils',
1309 'mercurial.revlogutils',
1310 'mercurial.revlogutils',
1310 'mercurial.testing',
1311 'mercurial.testing',
1311 'hgext',
1312 'hgext',
1312 'hgext.convert',
1313 'hgext.convert',
1313 'hgext.fsmonitor',
1314 'hgext.fsmonitor',
1314 'hgext.fastannotate',
1315 'hgext.fastannotate',
1315 'hgext.fsmonitor.pywatchman',
1316 'hgext.fsmonitor.pywatchman',
1316 'hgext.git',
1317 'hgext.git',
1317 'hgext.highlight',
1318 'hgext.highlight',
1318 'hgext.hooklib',
1319 'hgext.hooklib',
1319 'hgext.infinitepush',
1320 'hgext.infinitepush',
1320 'hgext.largefiles',
1321 'hgext.largefiles',
1321 'hgext.lfs',
1322 'hgext.lfs',
1322 'hgext.narrow',
1323 'hgext.narrow',
1323 'hgext.remotefilelog',
1324 'hgext.remotefilelog',
1324 'hgext.zeroconf',
1325 'hgext.zeroconf',
1325 'hgext3rd',
1326 'hgext3rd',
1326 'hgdemandimport',
1327 'hgdemandimport',
1327 ]
1328 ]
1328
1329
1329 for name in os.listdir(os.path.join('mercurial', 'templates')):
1330 for name in os.listdir(os.path.join('mercurial', 'templates')):
1330 if name != '__pycache__' and os.path.isdir(
1331 if name != '__pycache__' and os.path.isdir(
1331 os.path.join('mercurial', 'templates', name)
1332 os.path.join('mercurial', 'templates', name)
1332 ):
1333 ):
1333 packages.append('mercurial.templates.%s' % name)
1334 packages.append('mercurial.templates.%s' % name)
1334
1335
1335 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1336 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1336 # py2exe can't cope with namespace packages very well, so we have to
1337 # py2exe can't cope with namespace packages very well, so we have to
1337 # install any hgext3rd.* extensions that we want in the final py2exe
1338 # install any hgext3rd.* extensions that we want in the final py2exe
1338 # image here. This is gross, but you gotta do what you gotta do.
1339 # image here. This is gross, but you gotta do what you gotta do.
1339 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1340 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1340
1341
1341 common_depends = [
1342 common_depends = [
1342 'mercurial/bitmanipulation.h',
1343 'mercurial/bitmanipulation.h',
1343 'mercurial/compat.h',
1344 'mercurial/compat.h',
1344 'mercurial/cext/util.h',
1345 'mercurial/cext/util.h',
1345 ]
1346 ]
1346 common_include_dirs = ['mercurial']
1347 common_include_dirs = ['mercurial']
1347
1348
1348 common_cflags = []
1349 common_cflags = []
1349
1350
1350 # MSVC 2008 still needs declarations at the top of the scope, but Python 3.9
1351 # MSVC 2008 still needs declarations at the top of the scope, but Python 3.9
1351 # makes declarations not at the top of a scope in the headers.
1352 # makes declarations not at the top of a scope in the headers.
1352 if os.name != 'nt' and sys.version_info[1] < 9:
1353 if os.name != 'nt' and sys.version_info[1] < 9:
1353 common_cflags = ['-Werror=declaration-after-statement']
1354 common_cflags = ['-Werror=declaration-after-statement']
1354
1355
1355 osutil_cflags = []
1356 osutil_cflags = []
1356 osutil_ldflags = []
1357 osutil_ldflags = []
1357
1358
1358 # platform specific macros
1359 # platform specific macros
1359 for plat, func in [('bsd', 'setproctitle')]:
1360 for plat, func in [('bsd', 'setproctitle')]:
1360 if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
1361 if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
1361 osutil_cflags.append('-DHAVE_%s' % func.upper())
1362 osutil_cflags.append('-DHAVE_%s' % func.upper())
1362
1363
1363 for plat, macro, code in [
1364 for plat, macro, code in [
1364 (
1365 (
1365 'bsd|darwin',
1366 'bsd|darwin',
1366 'BSD_STATFS',
1367 'BSD_STATFS',
1367 '''
1368 '''
1368 #include <sys/param.h>
1369 #include <sys/param.h>
1369 #include <sys/mount.h>
1370 #include <sys/mount.h>
1370 int main() { struct statfs s; return sizeof(s.f_fstypename); }
1371 int main() { struct statfs s; return sizeof(s.f_fstypename); }
1371 ''',
1372 ''',
1372 ),
1373 ),
1373 (
1374 (
1374 'linux',
1375 'linux',
1375 'LINUX_STATFS',
1376 'LINUX_STATFS',
1376 '''
1377 '''
1377 #include <linux/magic.h>
1378 #include <linux/magic.h>
1378 #include <sys/vfs.h>
1379 #include <sys/vfs.h>
1379 int main() { struct statfs s; return sizeof(s.f_type); }
1380 int main() { struct statfs s; return sizeof(s.f_type); }
1380 ''',
1381 ''',
1381 ),
1382 ),
1382 ]:
1383 ]:
1383 if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
1384 if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
1384 osutil_cflags.append('-DHAVE_%s' % macro)
1385 osutil_cflags.append('-DHAVE_%s' % macro)
1385
1386
1386 if sys.platform == 'darwin':
1387 if sys.platform == 'darwin':
1387 osutil_ldflags += ['-framework', 'ApplicationServices']
1388 osutil_ldflags += ['-framework', 'ApplicationServices']
1388
1389
1389 if sys.platform == 'sunos5':
1390 if sys.platform == 'sunos5':
1390 osutil_ldflags += ['-lsocket']
1391 osutil_ldflags += ['-lsocket']
1391
1392
1392 xdiff_srcs = [
1393 xdiff_srcs = [
1393 'mercurial/thirdparty/xdiff/xdiffi.c',
1394 'mercurial/thirdparty/xdiff/xdiffi.c',
1394 'mercurial/thirdparty/xdiff/xprepare.c',
1395 'mercurial/thirdparty/xdiff/xprepare.c',
1395 'mercurial/thirdparty/xdiff/xutils.c',
1396 'mercurial/thirdparty/xdiff/xutils.c',
1396 ]
1397 ]
1397
1398
1398 xdiff_headers = [
1399 xdiff_headers = [
1399 'mercurial/thirdparty/xdiff/xdiff.h',
1400 'mercurial/thirdparty/xdiff/xdiff.h',
1400 'mercurial/thirdparty/xdiff/xdiffi.h',
1401 'mercurial/thirdparty/xdiff/xdiffi.h',
1401 'mercurial/thirdparty/xdiff/xinclude.h',
1402 'mercurial/thirdparty/xdiff/xinclude.h',
1402 'mercurial/thirdparty/xdiff/xmacros.h',
1403 'mercurial/thirdparty/xdiff/xmacros.h',
1403 'mercurial/thirdparty/xdiff/xprepare.h',
1404 'mercurial/thirdparty/xdiff/xprepare.h',
1404 'mercurial/thirdparty/xdiff/xtypes.h',
1405 'mercurial/thirdparty/xdiff/xtypes.h',
1405 'mercurial/thirdparty/xdiff/xutils.h',
1406 'mercurial/thirdparty/xdiff/xutils.h',
1406 ]
1407 ]
1407
1408
1408
1409
1409 class RustCompilationError(CCompilerError):
1410 class RustCompilationError(CCompilerError):
1410 """Exception class for Rust compilation errors."""
1411 """Exception class for Rust compilation errors."""
1411
1412
1412
1413
1413 class RustExtension(Extension):
1414 class RustExtension(Extension):
1414 """Base classes for concrete Rust Extension classes."""
1415 """Base classes for concrete Rust Extension classes."""
1415
1416
1416 rusttargetdir = os.path.join('rust', 'target', 'release')
1417 rusttargetdir = os.path.join('rust', 'target', 'release')
1417
1418
1418 def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
1419 def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
1419 Extension.__init__(self, mpath, sources, **kw)
1420 Extension.__init__(self, mpath, sources, **kw)
1420 srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
1421 srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
1421
1422
1422 # adding Rust source and control files to depends so that the extension
1423 # adding Rust source and control files to depends so that the extension
1423 # gets rebuilt if they've changed
1424 # gets rebuilt if they've changed
1424 self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
1425 self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
1425 cargo_lock = os.path.join(srcdir, 'Cargo.lock')
1426 cargo_lock = os.path.join(srcdir, 'Cargo.lock')
1426 if os.path.exists(cargo_lock):
1427 if os.path.exists(cargo_lock):
1427 self.depends.append(cargo_lock)
1428 self.depends.append(cargo_lock)
1428 for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
1429 for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
1429 self.depends.extend(
1430 self.depends.extend(
1430 os.path.join(dirpath, fname)
1431 os.path.join(dirpath, fname)
1431 for fname in fnames
1432 for fname in fnames
1432 if os.path.splitext(fname)[1] == '.rs'
1433 if os.path.splitext(fname)[1] == '.rs'
1433 )
1434 )
1434
1435
1435 @staticmethod
1436 @staticmethod
1436 def rustdylibsuffix():
1437 def rustdylibsuffix():
1437 """Return the suffix for shared libraries produced by rustc.
1438 """Return the suffix for shared libraries produced by rustc.
1438
1439
1439 See also: https://doc.rust-lang.org/reference/linkage.html
1440 See also: https://doc.rust-lang.org/reference/linkage.html
1440 """
1441 """
1441 if sys.platform == 'darwin':
1442 if sys.platform == 'darwin':
1442 return '.dylib'
1443 return '.dylib'
1443 elif os.name == 'nt':
1444 elif os.name == 'nt':
1444 return '.dll'
1445 return '.dll'
1445 else:
1446 else:
1446 return '.so'
1447 return '.so'
1447
1448
1448 def rustbuild(self):
1449 def rustbuild(self):
1449 env = os.environ.copy()
1450 env = os.environ.copy()
1450 if 'HGTEST_RESTOREENV' in env:
1451 if 'HGTEST_RESTOREENV' in env:
1451 # Mercurial tests change HOME to a temporary directory,
1452 # Mercurial tests change HOME to a temporary directory,
1452 # but, if installed with rustup, the Rust toolchain needs
1453 # but, if installed with rustup, the Rust toolchain needs
1453 # HOME to be correct (otherwise the 'no default toolchain'
1454 # HOME to be correct (otherwise the 'no default toolchain'
1454 # error message is issued and the build fails).
1455 # error message is issued and the build fails).
1455 # This happens currently with test-hghave.t, which does
1456 # This happens currently with test-hghave.t, which does
1456 # invoke this build.
1457 # invoke this build.
1457
1458
1458 # Unix only fix (os.path.expanduser not really reliable if
1459 # Unix only fix (os.path.expanduser not really reliable if
1459 # HOME is shadowed like this)
1460 # HOME is shadowed like this)
1460 import pwd
1461 import pwd
1461
1462
1462 env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
1463 env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
1463
1464
1464 cargocmd = ['cargo', 'rustc', '--release']
1465 cargocmd = ['cargo', 'rustc', '--release']
1465
1466
1466 rust_features = env.get("HG_RUST_FEATURES")
1467 rust_features = env.get("HG_RUST_FEATURES")
1467 if rust_features:
1468 if rust_features:
1468 cargocmd.extend(('--features', rust_features))
1469 cargocmd.extend(('--features', rust_features))
1469
1470
1470 cargocmd.append('--')
1471 cargocmd.append('--')
1471 if sys.platform == 'darwin':
1472 if sys.platform == 'darwin':
1472 cargocmd.extend(
1473 cargocmd.extend(
1473 ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup")
1474 ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup")
1474 )
1475 )
1475 try:
1476 try:
1476 subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
1477 subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
1477 except FileNotFoundError:
1478 except FileNotFoundError:
1478 raise RustCompilationError("Cargo not found")
1479 raise RustCompilationError("Cargo not found")
1479 except PermissionError:
1480 except PermissionError:
1480 raise RustCompilationError(
1481 raise RustCompilationError(
1481 "Cargo found, but permission to execute it is denied"
1482 "Cargo found, but permission to execute it is denied"
1482 )
1483 )
1483 except subprocess.CalledProcessError:
1484 except subprocess.CalledProcessError:
1484 raise RustCompilationError(
1485 raise RustCompilationError(
1485 "Cargo failed. Working directory: %r, "
1486 "Cargo failed. Working directory: %r, "
1486 "command: %r, environment: %r"
1487 "command: %r, environment: %r"
1487 % (self.rustsrcdir, cargocmd, env)
1488 % (self.rustsrcdir, cargocmd, env)
1488 )
1489 )
1489
1490
1490
1491
1491 class RustStandaloneExtension(RustExtension):
1492 class RustStandaloneExtension(RustExtension):
1492 def __init__(self, pydottedname, rustcrate, dylibname, **kw):
1493 def __init__(self, pydottedname, rustcrate, dylibname, **kw):
1493 RustExtension.__init__(
1494 RustExtension.__init__(
1494 self, pydottedname, [], dylibname, rustcrate, **kw
1495 self, pydottedname, [], dylibname, rustcrate, **kw
1495 )
1496 )
1496 self.dylibname = dylibname
1497 self.dylibname = dylibname
1497
1498
1498 def build(self, target_dir):
1499 def build(self, target_dir):
1499 self.rustbuild()
1500 self.rustbuild()
1500 target = [target_dir]
1501 target = [target_dir]
1501 target.extend(self.name.split('.'))
1502 target.extend(self.name.split('.'))
1502 target[-1] += DYLIB_SUFFIX
1503 target[-1] += DYLIB_SUFFIX
1503 target = os.path.join(*target)
1504 target = os.path.join(*target)
1504 os.makedirs(os.path.dirname(target), exist_ok=True)
1505 os.makedirs(os.path.dirname(target), exist_ok=True)
1505 shutil.copy2(
1506 shutil.copy2(
1506 os.path.join(
1507 os.path.join(
1507 self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
1508 self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
1508 ),
1509 ),
1509 target,
1510 target,
1510 )
1511 )
1511
1512
1512
1513
1513 extmodules = [
1514 extmodules = [
1514 Extension(
1515 Extension(
1515 'mercurial.cext.base85',
1516 'mercurial.cext.base85',
1516 ['mercurial/cext/base85.c'],
1517 ['mercurial/cext/base85.c'],
1517 include_dirs=common_include_dirs,
1518 include_dirs=common_include_dirs,
1518 extra_compile_args=common_cflags,
1519 extra_compile_args=common_cflags,
1519 depends=common_depends,
1520 depends=common_depends,
1520 ),
1521 ),
1521 Extension(
1522 Extension(
1522 'mercurial.cext.bdiff',
1523 'mercurial.cext.bdiff',
1523 ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
1524 ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
1524 include_dirs=common_include_dirs,
1525 include_dirs=common_include_dirs,
1525 extra_compile_args=common_cflags,
1526 extra_compile_args=common_cflags,
1526 depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
1527 depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
1527 ),
1528 ),
1528 Extension(
1529 Extension(
1529 'mercurial.cext.mpatch',
1530 'mercurial.cext.mpatch',
1530 ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
1531 ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
1531 include_dirs=common_include_dirs,
1532 include_dirs=common_include_dirs,
1532 extra_compile_args=common_cflags,
1533 extra_compile_args=common_cflags,
1533 depends=common_depends,
1534 depends=common_depends,
1534 ),
1535 ),
1535 Extension(
1536 Extension(
1536 'mercurial.cext.parsers',
1537 'mercurial.cext.parsers',
1537 [
1538 [
1538 'mercurial/cext/charencode.c',
1539 'mercurial/cext/charencode.c',
1539 'mercurial/cext/dirs.c',
1540 'mercurial/cext/dirs.c',
1540 'mercurial/cext/manifest.c',
1541 'mercurial/cext/manifest.c',
1541 'mercurial/cext/parsers.c',
1542 'mercurial/cext/parsers.c',
1542 'mercurial/cext/pathencode.c',
1543 'mercurial/cext/pathencode.c',
1543 'mercurial/cext/revlog.c',
1544 'mercurial/cext/revlog.c',
1544 ],
1545 ],
1545 include_dirs=common_include_dirs,
1546 include_dirs=common_include_dirs,
1546 extra_compile_args=common_cflags,
1547 extra_compile_args=common_cflags,
1547 depends=common_depends
1548 depends=common_depends
1548 + [
1549 + [
1549 'mercurial/cext/charencode.h',
1550 'mercurial/cext/charencode.h',
1550 'mercurial/cext/revlog.h',
1551 'mercurial/cext/revlog.h',
1551 ],
1552 ],
1552 ),
1553 ),
1553 Extension(
1554 Extension(
1554 'mercurial.cext.osutil',
1555 'mercurial.cext.osutil',
1555 ['mercurial/cext/osutil.c'],
1556 ['mercurial/cext/osutil.c'],
1556 include_dirs=common_include_dirs,
1557 include_dirs=common_include_dirs,
1557 extra_compile_args=common_cflags + osutil_cflags,
1558 extra_compile_args=common_cflags + osutil_cflags,
1558 extra_link_args=osutil_ldflags,
1559 extra_link_args=osutil_ldflags,
1559 depends=common_depends,
1560 depends=common_depends,
1560 ),
1561 ),
1561 Extension(
1562 Extension(
1562 'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
1563 'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
1563 [
1564 [
1564 'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
1565 'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
1565 ],
1566 ],
1566 extra_compile_args=common_cflags,
1567 extra_compile_args=common_cflags,
1567 ),
1568 ),
1568 Extension(
1569 Extension(
1569 'mercurial.thirdparty.sha1dc',
1570 'mercurial.thirdparty.sha1dc',
1570 [
1571 [
1571 'mercurial/thirdparty/sha1dc/cext.c',
1572 'mercurial/thirdparty/sha1dc/cext.c',
1572 'mercurial/thirdparty/sha1dc/lib/sha1.c',
1573 'mercurial/thirdparty/sha1dc/lib/sha1.c',
1573 'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
1574 'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
1574 ],
1575 ],
1575 extra_compile_args=common_cflags,
1576 extra_compile_args=common_cflags,
1576 ),
1577 ),
1577 Extension(
1578 Extension(
1578 'hgext.fsmonitor.pywatchman.bser',
1579 'hgext.fsmonitor.pywatchman.bser',
1579 ['hgext/fsmonitor/pywatchman/bser.c'],
1580 ['hgext/fsmonitor/pywatchman/bser.c'],
1580 extra_compile_args=common_cflags,
1581 extra_compile_args=common_cflags,
1581 ),
1582 ),
1582 RustStandaloneExtension(
1583 RustStandaloneExtension(
1583 'mercurial.rustext',
1584 'mercurial.rustext',
1584 'hg-cpython',
1585 'hg-cpython',
1585 'librusthg',
1586 'librusthg',
1586 ),
1587 ),
1587 ]
1588 ]
1588
1589
1589
1590
1590 sys.path.insert(0, 'contrib/python-zstandard')
1591 sys.path.insert(0, 'contrib/python-zstandard')
1591 import setup_zstd
1592 import setup_zstd
1592
1593
1593 zstd = setup_zstd.get_c_extension(
1594 zstd = setup_zstd.get_c_extension(
1594 name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
1595 name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
1595 )
1596 )
1596 zstd.extra_compile_args += common_cflags
1597 zstd.extra_compile_args += common_cflags
1597 extmodules.append(zstd)
1598 extmodules.append(zstd)
1598
1599
1599 try:
1600 try:
1600 from distutils import cygwinccompiler
1601 from distutils import cygwinccompiler
1601
1602
1602 # the -mno-cygwin option has been deprecated for years
1603 # the -mno-cygwin option has been deprecated for years
1603 mingw32compilerclass = cygwinccompiler.Mingw32CCompiler
1604 mingw32compilerclass = cygwinccompiler.Mingw32CCompiler
1604
1605
1605 class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
1606 class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
1606 def __init__(self, *args, **kwargs):
1607 def __init__(self, *args, **kwargs):
1607 mingw32compilerclass.__init__(self, *args, **kwargs)
1608 mingw32compilerclass.__init__(self, *args, **kwargs)
1608 for i in 'compiler compiler_so linker_exe linker_so'.split():
1609 for i in 'compiler compiler_so linker_exe linker_so'.split():
1609 try:
1610 try:
1610 getattr(self, i).remove('-mno-cygwin')
1611 getattr(self, i).remove('-mno-cygwin')
1611 except ValueError:
1612 except ValueError:
1612 pass
1613 pass
1613
1614
1614 cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
1615 cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
1615 except ImportError:
1616 except ImportError:
1616 # the cygwinccompiler package is not available on some Python
1617 # the cygwinccompiler package is not available on some Python
1617 # distributions like the ones from the optware project for Synology
1618 # distributions like the ones from the optware project for Synology
1618 # DiskStation boxes
1619 # DiskStation boxes
1619 class HackedMingw32CCompiler:
1620 class HackedMingw32CCompiler:
1620 pass
1621 pass
1621
1622
1622
1623
1623 if os.name == 'nt':
1624 if os.name == 'nt':
1624 # Allow compiler/linker flags to be added to Visual Studio builds. Passing
1625 # Allow compiler/linker flags to be added to Visual Studio builds. Passing
1625 # extra_link_args to distutils.extensions.Extension() doesn't have any
1626 # extra_link_args to distutils.extensions.Extension() doesn't have any
1626 # effect.
1627 # effect.
1627 from distutils import msvccompiler
1628 from distutils import msvccompiler
1628
1629
1629 msvccompilerclass = msvccompiler.MSVCCompiler
1630 msvccompilerclass = msvccompiler.MSVCCompiler
1630
1631
1631 class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
1632 class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
1632 def initialize(self):
1633 def initialize(self):
1633 msvccompilerclass.initialize(self)
1634 msvccompilerclass.initialize(self)
1634 # "warning LNK4197: export 'func' specified multiple times"
1635 # "warning LNK4197: export 'func' specified multiple times"
1635 self.ldflags_shared.append('/ignore:4197')
1636 self.ldflags_shared.append('/ignore:4197')
1636 self.ldflags_shared_debug.append('/ignore:4197')
1637 self.ldflags_shared_debug.append('/ignore:4197')
1637
1638
1638 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1639 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1639
1640
1640 packagedata = {
1641 packagedata = {
1641 'mercurial': [
1642 'mercurial': [
1642 'locale/*/LC_MESSAGES/hg.mo',
1643 'locale/*/LC_MESSAGES/hg.mo',
1643 'dummycert.pem',
1644 'dummycert.pem',
1644 ],
1645 ],
1645 'mercurial.defaultrc': [
1646 'mercurial.defaultrc': [
1646 '*.rc',
1647 '*.rc',
1647 ],
1648 ],
1648 'mercurial.helptext': [
1649 'mercurial.helptext': [
1649 '*.txt',
1650 '*.txt',
1650 ],
1651 ],
1651 'mercurial.helptext.internals': [
1652 'mercurial.helptext.internals': [
1652 '*.txt',
1653 '*.txt',
1653 ],
1654 ],
1654 'mercurial.thirdparty.attr': [
1655 'mercurial.thirdparty.attr': [
1655 '*.pyi',
1656 '*.pyi',
1656 'py.typed',
1657 'py.typed',
1657 ],
1658 ],
1658 }
1659 }
1659
1660
1660
1661
1661 def ordinarypath(p):
1662 def ordinarypath(p):
1662 return p and p[0] != '.' and p[-1] != '~'
1663 return p and p[0] != '.' and p[-1] != '~'
1663
1664
1664
1665
1665 for root in ('templates',):
1666 for root in ('templates',):
1666 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
1667 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
1667 packagename = curdir.replace(os.sep, '.')
1668 packagename = curdir.replace(os.sep, '.')
1668 packagedata[packagename] = list(filter(ordinarypath, files))
1669 packagedata[packagename] = list(filter(ordinarypath, files))
1669
1670
1670 datafiles = []
1671 datafiles = []
1671
1672
1672 # distutils expects version to be str/unicode. Converting it to
1673 # distutils expects version to be str/unicode. Converting it to
1673 # unicode on Python 2 still works because it won't contain any
1674 # unicode on Python 2 still works because it won't contain any
1674 # non-ascii bytes and will be implicitly converted back to bytes
1675 # non-ascii bytes and will be implicitly converted back to bytes
1675 # when operated on.
1676 # when operated on.
1676 assert isinstance(version, str)
1677 assert isinstance(version, str)
1677 setupversion = version
1678 setupversion = version
1678
1679
1679 extra = {}
1680 extra = {}
1680
1681
1681 py2exepackages = [
1682 py2exepackages = [
1682 'hgdemandimport',
1683 'hgdemandimport',
1683 'hgext3rd',
1684 'hgext3rd',
1684 'hgext',
1685 'hgext',
1685 'email',
1686 'email',
1686 # implicitly imported per module policy
1687 # implicitly imported per module policy
1687 # (cffi wouldn't be used as a frozen exe)
1688 # (cffi wouldn't be used as a frozen exe)
1688 'mercurial.cext',
1689 'mercurial.cext',
1689 #'mercurial.cffi',
1690 #'mercurial.cffi',
1690 'mercurial.pure',
1691 'mercurial.pure',
1691 ]
1692 ]
1692
1693
1693 py2exe_includes = []
1694 py2exe_includes = []
1694
1695
1695 py2exeexcludes = []
1696 py2exeexcludes = []
1696 py2exedllexcludes = ['crypt32.dll']
1697 py2exedllexcludes = ['crypt32.dll']
1697
1698
1698 if issetuptools:
1699 if issetuptools:
1699 extra['python_requires'] = supportedpy
1700 extra['python_requires'] = supportedpy
1700
1701
1701 if py2exeloaded:
1702 if py2exeloaded:
1702 extra['console'] = [
1703 extra['console'] = [
1703 {
1704 {
1704 'script': 'hg',
1705 'script': 'hg',
1705 'copyright': 'Copyright (C) 2005-2023 Olivia Mackall and others',
1706 'copyright': 'Copyright (C) 2005-2023 Olivia Mackall and others',
1706 'product_version': version,
1707 'product_version': version,
1707 }
1708 }
1708 ]
1709 ]
1709 # Sub command of 'build' because 'py2exe' does not handle sub_commands.
1710 # Sub command of 'build' because 'py2exe' does not handle sub_commands.
1710 # Need to override hgbuild because it has a private copy of
1711 # Need to override hgbuild because it has a private copy of
1711 # build.sub_commands.
1712 # build.sub_commands.
1712 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1713 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1713 # put dlls in sub directory so that they won't pollute PATH
1714 # put dlls in sub directory so that they won't pollute PATH
1714 extra['zipfile'] = 'lib/library.zip'
1715 extra['zipfile'] = 'lib/library.zip'
1715
1716
1716 # We allow some configuration to be supplemented via environment
1717 # We allow some configuration to be supplemented via environment
1717 # variables. This is better than setup.cfg files because it allows
1718 # variables. This is better than setup.cfg files because it allows
1718 # supplementing configs instead of replacing them.
1719 # supplementing configs instead of replacing them.
1719 extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
1720 extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
1720 if extrapackages:
1721 if extrapackages:
1721 py2exepackages.extend(extrapackages.split(' '))
1722 py2exepackages.extend(extrapackages.split(' '))
1722
1723
1723 extra_includes = os.environ.get('HG_PY2EXE_EXTRA_INCLUDES')
1724 extra_includes = os.environ.get('HG_PY2EXE_EXTRA_INCLUDES')
1724 if extra_includes:
1725 if extra_includes:
1725 py2exe_includes.extend(extra_includes.split(' '))
1726 py2exe_includes.extend(extra_includes.split(' '))
1726
1727
1727 excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
1728 excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
1728 if excludes:
1729 if excludes:
1729 py2exeexcludes.extend(excludes.split(' '))
1730 py2exeexcludes.extend(excludes.split(' '))
1730
1731
1731 dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
1732 dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
1732 if dllexcludes:
1733 if dllexcludes:
1733 py2exedllexcludes.extend(dllexcludes.split(' '))
1734 py2exedllexcludes.extend(dllexcludes.split(' '))
1734
1735
1735 if os.environ.get('PYOXIDIZER'):
1736 if os.environ.get('PYOXIDIZER'):
1736 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1737 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1737
1738
1738 if os.name == 'nt':
1739 if os.name == 'nt':
1739 # Windows binary file versions for exe/dll files must have the
1740 # Windows binary file versions for exe/dll files must have the
1740 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
1741 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
1741 setupversion = setupversion.split(r'+', 1)[0]
1742 setupversion = setupversion.split(r'+', 1)[0]
1742
1743
1743 setup(
1744 setup(
1744 name='mercurial',
1745 name='mercurial',
1745 version=setupversion,
1746 version=setupversion,
1746 author='Olivia Mackall and many others',
1747 author='Olivia Mackall and many others',
1747 author_email='mercurial@mercurial-scm.org',
1748 author_email='mercurial@mercurial-scm.org',
1748 url='https://mercurial-scm.org/',
1749 url='https://mercurial-scm.org/',
1749 download_url='https://mercurial-scm.org/release/',
1750 download_url='https://mercurial-scm.org/release/',
1750 description=(
1751 description=(
1751 'Fast scalable distributed SCM (revision control, version '
1752 'Fast scalable distributed SCM (revision control, version '
1752 'control) system'
1753 'control) system'
1753 ),
1754 ),
1754 long_description=(
1755 long_description=(
1755 'Mercurial is a distributed SCM tool written in Python.'
1756 'Mercurial is a distributed SCM tool written in Python.'
1756 ' It is used by a number of large projects that require'
1757 ' It is used by a number of large projects that require'
1757 ' fast, reliable distributed revision control, such as '
1758 ' fast, reliable distributed revision control, such as '
1758 'Mozilla.'
1759 'Mozilla.'
1759 ),
1760 ),
1760 license='GNU GPLv2 or any later version',
1761 license='GNU GPLv2 or any later version',
1761 classifiers=[
1762 classifiers=[
1762 'Development Status :: 6 - Mature',
1763 'Development Status :: 6 - Mature',
1763 'Environment :: Console',
1764 'Environment :: Console',
1764 'Intended Audience :: Developers',
1765 'Intended Audience :: Developers',
1765 'Intended Audience :: System Administrators',
1766 'Intended Audience :: System Administrators',
1766 'License :: OSI Approved :: GNU General Public License (GPL)',
1767 'License :: OSI Approved :: GNU General Public License (GPL)',
1767 'Natural Language :: Danish',
1768 'Natural Language :: Danish',
1768 'Natural Language :: English',
1769 'Natural Language :: English',
1769 'Natural Language :: German',
1770 'Natural Language :: German',
1770 'Natural Language :: Italian',
1771 'Natural Language :: Italian',
1771 'Natural Language :: Japanese',
1772 'Natural Language :: Japanese',
1772 'Natural Language :: Portuguese (Brazilian)',
1773 'Natural Language :: Portuguese (Brazilian)',
1773 'Operating System :: Microsoft :: Windows',
1774 'Operating System :: Microsoft :: Windows',
1774 'Operating System :: OS Independent',
1775 'Operating System :: OS Independent',
1775 'Operating System :: POSIX',
1776 'Operating System :: POSIX',
1776 'Programming Language :: C',
1777 'Programming Language :: C',
1777 'Programming Language :: Python',
1778 'Programming Language :: Python',
1778 'Topic :: Software Development :: Version Control',
1779 'Topic :: Software Development :: Version Control',
1779 ],
1780 ],
1780 scripts=scripts,
1781 scripts=scripts,
1781 packages=packages,
1782 packages=packages,
1782 ext_modules=extmodules,
1783 ext_modules=extmodules,
1783 data_files=datafiles,
1784 data_files=datafiles,
1784 package_data=packagedata,
1785 package_data=packagedata,
1785 cmdclass=cmdclass,
1786 cmdclass=cmdclass,
1786 distclass=hgdist,
1787 distclass=hgdist,
1787 options={
1788 options={
1788 'py2exe': {
1789 'py2exe': {
1789 'bundle_files': 3,
1790 'bundle_files': 3,
1790 'dll_excludes': py2exedllexcludes,
1791 'dll_excludes': py2exedllexcludes,
1791 'includes': py2exe_includes,
1792 'includes': py2exe_includes,
1792 'excludes': py2exeexcludes,
1793 'excludes': py2exeexcludes,
1793 'packages': py2exepackages,
1794 'packages': py2exepackages,
1794 },
1795 },
1795 'bdist_mpkg': {
1796 'bdist_mpkg': {
1796 'zipdist': False,
1797 'zipdist': False,
1797 'license': 'COPYING',
1798 'license': 'COPYING',
1798 'readme': 'contrib/packaging/macosx/Readme.html',
1799 'readme': 'contrib/packaging/macosx/Readme.html',
1799 'welcome': 'contrib/packaging/macosx/Welcome.html',
1800 'welcome': 'contrib/packaging/macosx/Welcome.html',
1800 },
1801 },
1801 },
1802 },
1802 **extra
1803 **extra
1803 )
1804 )
General Comments 0
You need to be logged in to leave comments. Login now